Add two vector timestamps

The normalized form of the X-Timestamp header looks like a float with a fixed
width to ensure stable string sorting - normalized timestamps look like
"1402464677.04188"

To support overwrites of existing data without modifying the original
timestamp but still maintain consistency a second internal offset
vector is appended to the normalized timestamp form which compares and
sorts greater than the fixed width float format but less than a newer
timestamp.  The internalized format of timestamps looks like
"1402464677.04188_0000000000000000" - the portion after the underscore
is the offset and is a formatted hexadecimal integer.

The internalized form is not exposed to clients in responses from Swift.
Normal client operations will not create a timestamp with an offset.

The Timestamp class in common.utils supports internalized and normalized
formatting of timestamps and also comparison of timestamp values.  When the
offset value of a Timestamp is 0 - it's considered insignificant and need not
be represented in the string format; to support backwards compatibility during
a Swift upgrade the internalized and normalized form of a Timestamp with an
insignificant offset are identical.  When a timestamp includes an offset it
will always be represented in the internalized form, but is still excluded
from the normalized form.  Timestamps with an equivalent timestamp portion
(the float part) will compare and order by their offset.  Timestamps with a
greater timestamp portion will always compare and order greater than a
Timestamp with a lesser timestamp regardless of its offset.  String
comparison and ordering is guaranteed for the internalized string format, and
is backwards compatible for normalized timestamps which do not include an
offset.

The reconciler currently uses an offset bump to ensure that objects can move to
the wrong storage policy and be moved back.  This use-case is valid because
the content represented by the user-facing timestamp is not modified in any way.
Future consumers of the offset vector of timestamps should be mindful of HTTP
semantics of If-Modified and take care to avoid deviation in the response from
the object server without an accompanying change to the user facing timestamp.

DocImpact
Implements: blueprint storage-policies
Change-Id: Id85c960b126ec919a481dc62469bf172b7fb8549
This commit is contained in:
Clay Gerrard 2014-06-10 22:17:47 -07:00
parent 8d20e0e927
commit c1dc2fa624
39 changed files with 2035 additions and 896 deletions

View File

@ -24,7 +24,7 @@ import errno
import sqlite3 import sqlite3
from swift.common.utils import normalize_timestamp, lock_parent_directory from swift.common.utils import Timestamp, lock_parent_directory
from swift.common.db import DatabaseBroker, DatabaseConnectionError, \ from swift.common.db import DatabaseBroker, DatabaseConnectionError, \
PENDING_CAP, PICKLE_PROTOCOL, utf8encode PENDING_CAP, PICKLE_PROTOCOL, utf8encode
@ -155,7 +155,7 @@ class AccountBroker(DatabaseBroker):
conn.execute(''' conn.execute('''
UPDATE account_stat SET account = ?, created_at = ?, id = ?, UPDATE account_stat SET account = ?, created_at = ?, id = ?,
put_timestamp = ?, status_changed_at = ? put_timestamp = ?, status_changed_at = ?
''', (self.account, normalize_timestamp(time.time()), str(uuid4()), ''', (self.account, Timestamp(time.time()).internal, str(uuid4()),
put_timestamp, put_timestamp)) put_timestamp, put_timestamp))
def create_policy_stat_table(self, conn): def create_policy_stat_table(self, conn):
@ -293,7 +293,7 @@ class AccountBroker(DatabaseBroker):
""" """
return status == 'DELETED' or ( return status == 'DELETED' or (
container_count in (None, '', 0, '0') and container_count in (None, '', 0, '0') and
float(delete_timestamp) > float(put_timestamp)) Timestamp(delete_timestamp) > Timestamp(put_timestamp))
def _is_deleted(self, conn): def _is_deleted(self, conn):
""" """

View File

@ -18,7 +18,7 @@ import random
from swift import gettext_ as _ from swift import gettext_ as _
from logging import DEBUG from logging import DEBUG
from math import sqrt from math import sqrt
from time import time, ctime from time import time
from eventlet import GreenPool, sleep, Timeout from eventlet import GreenPool, sleep, Timeout
@ -29,7 +29,7 @@ from swift.common.direct_client import direct_delete_container, \
from swift.common.exceptions import ClientException from swift.common.exceptions import ClientException
from swift.common.ring import Ring from swift.common.ring import Ring
from swift.common.utils import get_logger, whataremyips, ismount, \ from swift.common.utils import get_logger, whataremyips, ismount, \
config_true_value config_true_value, Timestamp
from swift.common.daemon import Daemon from swift.common.daemon import Daemon
from swift.common.storage_policy import POLICIES, POLICY_INDEX from swift.common.storage_policy import POLICIES, POLICY_INDEX
@ -229,7 +229,8 @@ class AccountReaper(Daemon):
""" """
begin = time() begin = time()
info = broker.get_info() info = broker.get_info()
if time() - float(info['delete_timestamp']) <= self.delay_reaping: if time() - float(Timestamp(info['delete_timestamp'])) <= \
self.delay_reaping:
return False return False
account = info['account'] account = info['account']
self.logger.info(_('Beginning pass on account %s'), account) self.logger.info(_('Beginning pass on account %s'), account)
@ -281,10 +282,11 @@ class AccountReaper(Daemon):
log += _(', elapsed: %.02fs') % (time() - begin) log += _(', elapsed: %.02fs') % (time() - begin)
self.logger.info(log) self.logger.info(log)
self.logger.timing_since('timing', self.start_time) self.logger.timing_since('timing', self.start_time)
delete_timestamp = Timestamp(info['delete_timestamp'])
if self.stats_containers_remaining and \ if self.stats_containers_remaining and \
begin - float(info['delete_timestamp']) >= self.reap_not_done_after: begin - float(delete_timestamp) >= self.reap_not_done_after:
self.logger.warn(_('Account %s has not been reaped since %s') % self.logger.warn(_('Account %s has not been reaped since %s') %
(account, ctime(float(info['delete_timestamp'])))) (account, delete_timestamp.isoformat))
return True return True
def reap_container(self, account, account_partition, account_nodes, def reap_container(self, account, account_partition, account_nodes,

View File

@ -27,9 +27,9 @@ from swift.common.db import DatabaseConnectionError, DatabaseAlreadyExists
from swift.common.request_helpers import get_param, get_listing_content_type, \ from swift.common.request_helpers import get_param, get_listing_content_type, \
split_and_validate_path split_and_validate_path
from swift.common.utils import get_logger, hash_path, public, \ from swift.common.utils import get_logger, hash_path, public, \
normalize_timestamp, storage_directory, config_true_value, \ Timestamp, storage_directory, config_true_value, \
json, timing_stats, replication, get_log_line json, timing_stats, replication, get_log_line
from swift.common.constraints import check_mount, check_float, check_utf8 from swift.common.constraints import check_mount, valid_timestamp, check_utf8
from swift.common import constraints from swift.common import constraints
from swift.common.db_replicator import ReplicatorRpc from swift.common.db_replicator import ReplicatorRpc
from swift.common.swob import HTTPAccepted, HTTPBadRequest, \ from swift.common.swob import HTTPAccepted, HTTPBadRequest, \
@ -90,14 +90,11 @@ class AccountController(object):
drive, part, account = split_and_validate_path(req, 3) drive, part, account = split_and_validate_path(req, 3)
if self.mount_check and not check_mount(self.root, drive): if self.mount_check and not check_mount(self.root, drive):
return HTTPInsufficientStorage(drive=drive, request=req) return HTTPInsufficientStorage(drive=drive, request=req)
if 'x-timestamp' not in req.headers or \ req_timestamp = valid_timestamp(req)
not check_float(req.headers['x-timestamp']):
return HTTPBadRequest(body='Missing timestamp', request=req,
content_type='text/plain')
broker = self._get_account_broker(drive, part, account) broker = self._get_account_broker(drive, part, account)
if broker.is_deleted(): if broker.is_deleted():
return self._deleted_response(broker, req, HTTPNotFound) return self._deleted_response(broker, req, HTTPNotFound)
broker.delete_db(req.headers['x-timestamp']) broker.delete_db(req_timestamp.internal)
return self._deleted_response(broker, req, HTTPNoContent) return self._deleted_response(broker, req, HTTPNoContent)
@public @public
@ -108,6 +105,10 @@ class AccountController(object):
if self.mount_check and not check_mount(self.root, drive): if self.mount_check and not check_mount(self.root, drive):
return HTTPInsufficientStorage(drive=drive, request=req) return HTTPInsufficientStorage(drive=drive, request=req)
if container: # put account container if container: # put account container
if 'x-timestamp' not in req.headers:
timestamp = Timestamp(time.time())
else:
timestamp = valid_timestamp(req)
pending_timeout = None pending_timeout = None
container_policy_index = req.headers.get(POLICY_INDEX, 0) container_policy_index = req.headers.get(POLICY_INDEX, 0)
if 'x-trans-id' in req.headers: if 'x-trans-id' in req.headers:
@ -117,8 +118,7 @@ class AccountController(object):
if account.startswith(self.auto_create_account_prefix) and \ if account.startswith(self.auto_create_account_prefix) and \
not os.path.exists(broker.db_file): not os.path.exists(broker.db_file):
try: try:
broker.initialize(normalize_timestamp( broker.initialize(timestamp.internal)
req.headers.get('x-timestamp') or time.time()))
except DatabaseAlreadyExists: except DatabaseAlreadyExists:
pass pass
if req.headers.get('x-account-override-deleted', 'no').lower() != \ if req.headers.get('x-account-override-deleted', 'no').lower() != \
@ -135,11 +135,11 @@ class AccountController(object):
else: else:
return HTTPCreated(request=req) return HTTPCreated(request=req)
else: # put account else: # put account
timestamp = valid_timestamp(req)
broker = self._get_account_broker(drive, part, account) broker = self._get_account_broker(drive, part, account)
timestamp = normalize_timestamp(req.headers['x-timestamp'])
if not os.path.exists(broker.db_file): if not os.path.exists(broker.db_file):
try: try:
broker.initialize(timestamp) broker.initialize(timestamp.internal)
created = True created = True
except DatabaseAlreadyExists: except DatabaseAlreadyExists:
created = False created = False
@ -148,11 +148,11 @@ class AccountController(object):
body='Recently deleted') body='Recently deleted')
else: else:
created = broker.is_deleted() created = broker.is_deleted()
broker.update_put_timestamp(timestamp) broker.update_put_timestamp(timestamp.internal)
if broker.is_deleted(): if broker.is_deleted():
return HTTPConflict(request=req) return HTTPConflict(request=req)
metadata = {} metadata = {}
metadata.update((key, (value, timestamp)) metadata.update((key, (value, timestamp.internal))
for key, value in req.headers.iteritems() for key, value in req.headers.iteritems()
if is_sys_or_user_meta('account', key)) if is_sys_or_user_meta('account', key))
if metadata: if metadata:
@ -238,19 +238,14 @@ class AccountController(object):
def POST(self, req): def POST(self, req):
"""Handle HTTP POST request.""" """Handle HTTP POST request."""
drive, part, account = split_and_validate_path(req, 3) drive, part, account = split_and_validate_path(req, 3)
if 'x-timestamp' not in req.headers or \ req_timestamp = valid_timestamp(req)
not check_float(req.headers['x-timestamp']):
return HTTPBadRequest(body='Missing or bad timestamp',
request=req,
content_type='text/plain')
if self.mount_check and not check_mount(self.root, drive): if self.mount_check and not check_mount(self.root, drive):
return HTTPInsufficientStorage(drive=drive, request=req) return HTTPInsufficientStorage(drive=drive, request=req)
broker = self._get_account_broker(drive, part, account) broker = self._get_account_broker(drive, part, account)
if broker.is_deleted(): if broker.is_deleted():
return self._deleted_response(broker, req, HTTPNotFound) return self._deleted_response(broker, req, HTTPNotFound)
timestamp = normalize_timestamp(req.headers['x-timestamp'])
metadata = {} metadata = {}
metadata.update((key, (value, timestamp)) metadata.update((key, (value, req_timestamp.internal))
for key, value in req.headers.iteritems() for key, value in req.headers.iteritems()
if is_sys_or_user_meta('account', key)) if is_sys_or_user_meta('account', key))
if metadata: if metadata:

View File

@ -17,7 +17,7 @@ import time
from xml.sax import saxutils from xml.sax import saxutils
from swift.common.swob import HTTPOk, HTTPNoContent from swift.common.swob import HTTPOk, HTTPNoContent
from swift.common.utils import json, normalize_timestamp from swift.common.utils import json, Timestamp
from swift.common.storage_policy import POLICIES from swift.common.storage_policy import POLICIES
@ -27,7 +27,7 @@ class FakeAccountBroker(object):
like an account broker would for a real, empty account with no metadata. like an account broker would for a real, empty account with no metadata.
""" """
def get_info(self): def get_info(self):
now = normalize_timestamp(time.time()) now = Timestamp(time.time()).internal
return {'container_count': 0, return {'container_count': 0,
'object_count': 0, 'object_count': 0,
'bytes_used': 0, 'bytes_used': 0,
@ -51,8 +51,8 @@ def get_response_headers(broker):
'X-Account-Container-Count': info['container_count'], 'X-Account-Container-Count': info['container_count'],
'X-Account-Object-Count': info['object_count'], 'X-Account-Object-Count': info['object_count'],
'X-Account-Bytes-Used': info['bytes_used'], 'X-Account-Bytes-Used': info['bytes_used'],
'X-Timestamp': info['created_at'], 'X-Timestamp': Timestamp(info['created_at']).normal,
'X-PUT-Timestamp': info['put_timestamp']} 'X-PUT-Timestamp': Timestamp(info['put_timestamp']).normal}
policy_stats = broker.get_policy_stats() policy_stats = broker.get_policy_stats()
for policy_idx, stats in policy_stats.items(): for policy_idx, stats in policy_stats.items():
policy = POLICIES.get_by_index(policy_idx) policy = POLICIES.get_by_index(policy_idx)

View File

@ -14,10 +14,10 @@ import itertools
import os import os
import sqlite3 import sqlite3
import urllib import urllib
from datetime import datetime
from hashlib import md5 from hashlib import md5
from swift.common.utils import hash_path, storage_directory from swift.common.utils import hash_path, storage_directory, \
Timestamp
from swift.common.ring import Ring from swift.common.ring import Ring
from swift.common.request_helpers import is_sys_meta, is_user_meta, \ from swift.common.request_helpers import is_sys_meta, is_user_meta, \
strip_sys_meta_prefix, strip_user_meta_prefix strip_sys_meta_prefix, strip_user_meta_prefix
@ -174,16 +174,16 @@ def print_db_info_metadata(db_type, info, metadata):
print 'Metadata:' print 'Metadata:'
print (' Created at: %s (%s)' % print (' Created at: %s (%s)' %
(datetime.utcfromtimestamp(float(info['created_at'])), (Timestamp(info['created_at']).isoformat,
info['created_at'])) info['created_at']))
print (' Put Timestamp: %s (%s)' % print (' Put Timestamp: %s (%s)' %
(datetime.utcfromtimestamp(float(info['put_timestamp'])), (Timestamp(info['put_timestamp']).isoformat,
info['put_timestamp'])) info['put_timestamp']))
print (' Delete Timestamp: %s (%s)' % print (' Delete Timestamp: %s (%s)' %
(datetime.utcfromtimestamp(float(info['delete_timestamp'])), (Timestamp(info['delete_timestamp']).isoformat,
info['delete_timestamp'])) info['delete_timestamp']))
print (' Status Timestamp: %s (%s)' % print (' Status Timestamp: %s (%s)' %
(datetime.utcfromtimestamp(float(info['status_changed_at'])), (Timestamp(info['status_changed_at']).isoformat,
info['status_changed_at'])) info['status_changed_at']))
if db_type == 'account': if db_type == 'account':
print ' Container Count: %s' % info['container_count'] print ' Container Count: %s' % info['container_count']
@ -197,12 +197,10 @@ def print_db_info_metadata(db_type, info, metadata):
print (' Storage Policy: %s (%s)' % ( print (' Storage Policy: %s (%s)' % (
policy_name, info['storage_policy_index'])) policy_name, info['storage_policy_index']))
print (' Reported Put Timestamp: %s (%s)' % print (' Reported Put Timestamp: %s (%s)' %
(datetime.utcfromtimestamp( (Timestamp(info['reported_put_timestamp']).isoformat,
float(info['reported_put_timestamp'])),
info['reported_put_timestamp'])) info['reported_put_timestamp']))
print (' Reported Delete Timestamp: %s (%s)' % print (' Reported Delete Timestamp: %s (%s)' %
(datetime.utcfromtimestamp (Timestamp(info['reported_delete_timestamp']).isoformat,
(float(info['reported_delete_timestamp'])),
info['reported_delete_timestamp'])) info['reported_delete_timestamp']))
print ' Reported Object Count: %s' % info['reported_object_count'] print ' Reported Object Count: %s' % info['reported_object_count']
print ' Reported Bytes Used: %s' % info['reported_bytes_used'] print ' Reported Bytes Used: %s' % info['reported_bytes_used']
@ -255,7 +253,7 @@ def print_obj_metadata(metadata):
raise ValueError('Metadata is None') raise ValueError('Metadata is None')
path = metadata.pop('name', '') path = metadata.pop('name', '')
content_type = metadata.pop('Content-Type', '') content_type = metadata.pop('Content-Type', '')
ts = metadata.pop('X-Timestamp', 0) ts = Timestamp(metadata.pop('X-Timestamp', 0))
account = container = obj = obj_hash = None account = container = obj = obj_hash = None
if path: if path:
try: try:
@ -276,8 +274,7 @@ def print_obj_metadata(metadata):
else: else:
print 'Content-Type: Not found in metadata' print 'Content-Type: Not found in metadata'
if ts: if ts:
print ('Timestamp: %s (%s)' % print ('Timestamp: %s (%s)' % (ts.isoformat, ts.internal))
(datetime.utcfromtimestamp(float(ts)), ts))
else: else:
print 'Timestamp: Not found in metadata' print 'Timestamp: Not found in metadata'

View File

@ -18,7 +18,7 @@ import urllib
from urllib import unquote from urllib import unquote
from ConfigParser import ConfigParser, NoSectionError, NoOptionError from ConfigParser import ConfigParser, NoSectionError, NoOptionError
from swift.common import utils from swift.common import utils, exceptions
from swift.common.swob import HTTPBadRequest, HTTPLengthRequired, \ from swift.common.swob import HTTPBadRequest, HTTPLengthRequired, \
HTTPRequestEntityTooLarge, HTTPPreconditionFailed HTTPRequestEntityTooLarge, HTTPPreconditionFailed
@ -209,6 +209,22 @@ def check_float(string):
return False return False
def valid_timestamp(request):
    """
    Pull the X-Timestamp out of a request that is required to carry one,
    translating a parse failure into a client error response.

    :param request: the swob request object

    :returns: a valid Timestamp instance
    :raises: HTTPBadRequest on missing or invalid X-Timestamp
    """
    try:
        timestamp = request.timestamp
    except exceptions.InvalidTimestamp as err:
        # surface the parse failure to the client as a 400
        raise HTTPBadRequest(body=str(err), request=request,
                             content_type='text/plain')
    return timestamp
def check_utf8(string): def check_utf8(string):
""" """
Validate if a string is valid UTF-8 str or unicode and that it Validate if a string is valid UTF-8 str or unicode and that it

View File

@ -29,7 +29,7 @@ from tempfile import mkstemp
from eventlet import sleep, Timeout from eventlet import sleep, Timeout
import sqlite3 import sqlite3
from swift.common.utils import json, normalize_timestamp, renamer, \ from swift.common.utils import json, Timestamp, renamer, \
mkdirs, lock_parent_directory, fallocate mkdirs, lock_parent_directory, fallocate
from swift.common.exceptions import LockTimeout from swift.common.exceptions import LockTimeout
@ -144,7 +144,7 @@ def chexor(old, name, timestamp):
:param old: hex representation of the current DB hash :param old: hex representation of the current DB hash
:param name: name of the object or container being inserted :param name: name of the object or container being inserted
:param timestamp: timestamp of the new record :param timestamp: internalized timestamp of the new record
:returns: a hex representation of the new hash value :returns: a hex representation of the new hash value
""" """
if name is None: if name is None:
@ -222,7 +222,7 @@ class DatabaseBroker(object):
The storage_policy_index is passed through to the subclass's The storage_policy_index is passed through to the subclass's
``_initialize`` method. It is ignored by ``AccountBroker``. ``_initialize`` method. It is ignored by ``AccountBroker``.
:param put_timestamp: timestamp of initial PUT request :param put_timestamp: internalized timestamp of initial PUT request
:param storage_policy_index: only required for containers :param storage_policy_index: only required for containers
""" """
if self.db_file == ':memory:': if self.db_file == ':memory:':
@ -280,7 +280,7 @@ class DatabaseBroker(object):
END; END;
""") """)
if not put_timestamp: if not put_timestamp:
put_timestamp = normalize_timestamp(0) put_timestamp = Timestamp(0).internal
self._initialize(conn, put_timestamp, self._initialize(conn, put_timestamp,
storage_policy_index=storage_policy_index) storage_policy_index=storage_policy_index)
conn.commit() conn.commit()
@ -302,9 +302,8 @@ class DatabaseBroker(object):
""" """
Mark the DB as deleted Mark the DB as deleted
:param timestamp: delete timestamp :param timestamp: internalized delete timestamp
""" """
timestamp = normalize_timestamp(timestamp)
# first, clear the metadata # first, clear the metadata
cleared_meta = {} cleared_meta = {}
for k in self.metadata: for k in self.metadata:
@ -463,8 +462,8 @@ class DatabaseBroker(object):
delete_timestamp=MAX(?, delete_timestamp) delete_timestamp=MAX(?, delete_timestamp)
''' % self.db_type, (created_at, put_timestamp, delete_timestamp)) ''' % self.db_type, (created_at, put_timestamp, delete_timestamp))
if old_status != self._is_deleted(conn): if old_status != self._is_deleted(conn):
timestamp = normalize_timestamp(time.time()) timestamp = Timestamp(time.time())
self._update_status_changed_at(conn, timestamp) self._update_status_changed_at(conn, timestamp.internal)
conn.commit() conn.commit()
@ -790,7 +789,7 @@ class DatabaseBroker(object):
Update the put_timestamp. Only modifies it if it is greater than Update the put_timestamp. Only modifies it if it is greater than
the current timestamp. the current timestamp.
:param timestamp: put timestamp :param timestamp: internalized put timestamp
""" """
with self.get() as conn: with self.get() as conn:
conn.execute( conn.execute(
@ -804,6 +803,8 @@ class DatabaseBroker(object):
Update the status_changed_at field in the stat table. Only Update the status_changed_at field in the stat table. Only
modifies status_changed_at if the timestamp is greater than the modifies status_changed_at if the timestamp is greater than the
current status_changed_at timestamp. current status_changed_at timestamp.
:param timestamp: internalized timestamp
""" """
with self.get() as conn: with self.get() as conn:
self._update_status_changed_at(conn, timestamp) self._update_status_changed_at(conn, timestamp)

View File

@ -31,7 +31,7 @@ import swift.common.db
from swift.common.direct_client import quote from swift.common.direct_client import quote
from swift.common.utils import get_logger, whataremyips, storage_directory, \ from swift.common.utils import get_logger, whataremyips, storage_directory, \
renamer, mkdirs, lock_parent_directory, config_true_value, \ renamer, mkdirs, lock_parent_directory, config_true_value, \
unlink_older_than, dump_recon_cache, rsync_ip, ismount, json unlink_older_than, dump_recon_cache, rsync_ip, ismount, json, Timestamp
from swift.common import ring from swift.common import ring
from swift.common.http import HTTP_NOT_FOUND, HTTP_INSUFFICIENT_STORAGE from swift.common.http import HTTP_NOT_FOUND, HTTP_INSUFFICIENT_STORAGE
from swift.common.bufferedhttp import BufferedHTTPConnection from swift.common.bufferedhttp import BufferedHTTPConnection
@ -458,16 +458,8 @@ class Replicator(Daemon):
return return
# The db is considered deleted if the delete_timestamp value is greater # The db is considered deleted if the delete_timestamp value is greater
# than the put_timestamp, and there are no objects. # than the put_timestamp, and there are no objects.
delete_timestamp = 0 delete_timestamp = Timestamp(info.get('delete_timestamp') or 0)
try: put_timestamp = Timestamp(info.get('put_timestamp') or 0)
delete_timestamp = float(info['delete_timestamp'])
except ValueError:
pass
put_timestamp = 0
try:
put_timestamp = float(info['put_timestamp'])
except ValueError:
pass
if delete_timestamp < (now - self.reclaim_age) and \ if delete_timestamp < (now - self.reclaim_age) and \
delete_timestamp > put_timestamp and \ delete_timestamp > put_timestamp and \
info['count'] in (None, '', 0, '0'): info['count'] in (None, '', 0, '0'):

View File

@ -27,7 +27,7 @@ from eventlet import sleep, Timeout
from swift.common.bufferedhttp import http_connect from swift.common.bufferedhttp import http_connect
from swift.common.exceptions import ClientException from swift.common.exceptions import ClientException
from swift.common.utils import normalize_timestamp, FileLikeIter from swift.common.utils import Timestamp, FileLikeIter
from swift.common.http import HTTP_NO_CONTENT, HTTP_INSUFFICIENT_STORAGE, \ from swift.common.http import HTTP_NO_CONTENT, HTTP_INSUFFICIENT_STORAGE, \
is_success, is_server_error is_success, is_server_error
from swift.common.swob import HeaderKeyDict from swift.common.swob import HeaderKeyDict
@ -91,7 +91,7 @@ def _get_direct_account_container(path, stype, node, part,
def gen_headers(hdrs_in=None, add_ts=False): def gen_headers(hdrs_in=None, add_ts=False):
hdrs_out = HeaderKeyDict(hdrs_in) if hdrs_in else HeaderKeyDict() hdrs_out = HeaderKeyDict(hdrs_in) if hdrs_in else HeaderKeyDict()
if add_ts: if add_ts:
hdrs_out['X-Timestamp'] = normalize_timestamp(time()) hdrs_out['X-Timestamp'] = Timestamp(time()).internal
hdrs_out['User-Agent'] = 'direct-client %s' % os.getpid() hdrs_out['User-Agent'] = 'direct-client %s' % os.getpid()
return hdrs_out return hdrs_out

View File

@ -14,6 +14,7 @@
# limitations under the License. # limitations under the License.
from eventlet import Timeout from eventlet import Timeout
import swift.common.utils
class MessageTimeout(Timeout): class MessageTimeout(Timeout):
@ -30,6 +31,10 @@ class SwiftException(Exception):
pass pass
class InvalidTimestamp(SwiftException):
pass
class DiskFileError(SwiftException): class DiskFileError(SwiftException):
pass pass
@ -54,7 +59,8 @@ class DiskFileDeleted(DiskFileNotExist):
def __init__(self, metadata=None): def __init__(self, metadata=None):
self.metadata = metadata or {} self.metadata = metadata or {}
self.timestamp = self.metadata.get('X-Timestamp', 0) self.timestamp = swift.common.utils.Timestamp(
self.metadata.get('X-Timestamp', 0))
class DiskFileExpired(DiskFileDeleted): class DiskFileExpired(DiskFileDeleted):

View File

@ -49,7 +49,8 @@ import random
import functools import functools
import inspect import inspect
from swift.common.utils import reiterate, split_path from swift.common.utils import reiterate, split_path, Timestamp
from swift.common.exceptions import InvalidTimestamp
RESPONSE_REASONS = { RESPONSE_REASONS = {
@ -762,6 +763,7 @@ class Request(object):
body = _req_body_property() body = _req_body_property()
charset = None charset = None
_params_cache = None _params_cache = None
_timestamp = None
acl = _req_environ_property('swob.ACL') acl = _req_environ_property('swob.ACL')
def __init__(self, environ): def __init__(self, environ):
@ -843,6 +845,22 @@ class Request(object):
return self._params_cache return self._params_cache
str_params = params str_params = params
@property
def timestamp(self):
    """
    Provides HTTP_X_TIMESTAMP as a :class:`~swift.common.utils.Timestamp`

    :raises InvalidTimestamp: if the X-Timestamp header is missing or
        cannot be parsed as a Timestamp
    """
    if self._timestamp is None:
        # parse lazily and memoize in _timestamp so repeated access
        # does not re-parse the header
        try:
            raw_timestamp = self.environ['HTTP_X_TIMESTAMP']
        except KeyError:
            raise InvalidTimestamp('Missing X-Timestamp header')
        try:
            self._timestamp = Timestamp(raw_timestamp)
        except ValueError:
            raise InvalidTimestamp('Invalid X-Timestamp header')
    return self._timestamp
@property @property
def path_qs(self): def path_qs(self):
"""The path of the request, without host but with query string.""" """The path of the request, without host but with query string."""

View File

@ -63,7 +63,7 @@ utf8_decoder = codecs.getdecoder('utf-8')
utf8_encoder = codecs.getencoder('utf-8') utf8_encoder = codecs.getencoder('utf-8')
from swift import gettext_ as _ from swift import gettext_ as _
from swift.common.exceptions import LockTimeout, MessageTimeout import swift.common.exceptions
from swift.common.http import is_success, is_redirection, HTTP_NOT_FOUND from swift.common.http import is_success, is_redirection, HTTP_NOT_FOUND
# logging doesn't import patched as cleanly as one would like # logging doesn't import patched as cleanly as one would like
@ -562,6 +562,120 @@ def drop_buffer_cache(fd, offset, length):
'length': length, 'ret': ret}) 'length': length, 'ret': ret})
# Timestamp string formats: NORMAL_FORMAT renders the fixed width float
# form ("1402464677.04188"); INTERNAL_FORMAT appends the hexadecimal
# offset vector ("1402464677.04188_0000000000000000").
NORMAL_FORMAT = "%016.05f"
INTERNAL_FORMAT = NORMAL_FORMAT + '_%016x'

# Setting this to True will cause the internal format to always display
# extended digits - even when the value is equivalent to the normalized form.
# This isn't ideal during an upgrade when some servers might not understand
# the new time format - but flipping it to True works great for testing.
FORCE_INTERNAL = False  # or True
class Timestamp(object):
"""
Internal Representation of Swift Time.
The normalized form of the X-Timestamp header looks like a float
with a fixed width to ensure stable string sorting - normalized
timestamps look like "1402464677.04188"
To support overwrites of existing data without modifying the original
timestamp but still maintain consistency a second internal offset vector
is append to the normalized timestamp form which compares and sorts
greater than the fixed width float format but less than a newer timestamp.
The internalized format of timestamps looks like
"1402464677.04188_0000000000000000" - the portion after the underscore is
the offset and is a formatted hexadecimal integer.
The internalized form is not exposed to clients in responses from
Swift. Normal client operations will not create a timestamp with an
offset.
The Timestamp class in common.utils supports internalized and
normalized formatting of timestamps and also comparison of timestamp
values. When the offset value of a Timestamp is 0 - it's considered
insignificant and need not be represented in the string format; to
support backwards compatibility during a Swift upgrade the
internalized and normalized form of a Timestamp with an
insignificant offset are identical. When a timestamp includes an
offset it will always be represented in the internalized form, but
is still excluded from the normalized form. Timestamps with an
equivalent timestamp portion (the float part) will compare and order
by their offset. Timestamps with a greater timestamp portion will
always compare and order greater than a Timestamp with a lesser
timestamp regardless of it's offset. String comparison and ordering
is guaranteed for the internalized string format, and is backwards
compatible for normalized timestamps which do not include an offset.
"""
def __init__(self, timestamp, offset=0):
if isinstance(timestamp, basestring):
parts = timestamp.split('_', 1)
self.timestamp = float(parts.pop(0))
if parts:
self.offset = int(parts[0], 16)
else:
self.offset = 0
else:
self.timestamp = float(timestamp)
self.offset = getattr(timestamp, 'offset', 0)
# increment offset
if offset >= 0:
self.offset += offset
else:
raise ValueError('offset must be non-negative')
def __repr__(self):
return INTERNAL_FORMAT % (self.timestamp, self.offset)
def __str__(self):
raise TypeError('You must specificy which string format is required')
def __float__(self):
return self.timestamp
def __int__(self):
return int(self.timestamp)
def __nonzero__(self):
return bool(self.timestamp or self.offset)
@property
def normal(self):
return NORMAL_FORMAT % self.timestamp
@property
def internal(self):
    # Storage-facing form: include the hex offset whenever it is
    # significant (or when FORCE_INTERNAL demands it); otherwise fall
    # back to the normalized form for backwards compatibility.
    if not (self.offset or FORCE_INTERNAL):
        return self.normal
    return INTERNAL_FORMAT % (self.timestamp, self.offset)
@property
def isoformat(self):
    # Render the normalized (offset-free) timestamp as an ISO 8601
    # string with a fixed six-digit microsecond field.
    dt = datetime.datetime.utcfromtimestamp(float(self.normal))
    iso = dt.isoformat()
    # datetime.isoformat() drops the fractional part entirely when
    # microseconds are zero; pad it back for a fixed-width result.
    if '.' not in iso:
        iso += '.000000'
    return iso
def __eq__(self, other):
    """
    Compare equal when the internalized string forms match.

    ``None`` (e.g. a missing timestamp) is never equal to a
    Timestamp; previously this raised ``TypeError`` from
    ``float(None)`` during coercion.  Any other value is coerced
    through the Timestamp constructor before comparison.
    """
    if other is None:
        return False
    if not isinstance(other, Timestamp):
        other = Timestamp(other)
    return self.internal == other.internal
def __ne__(self, other):
    """
    Compare unequal when the internalized string forms differ.

    ``None`` is always unequal to a Timestamp; previously this raised
    ``TypeError`` from ``float(None)`` during coercion.  Any other
    value is coerced through the Timestamp constructor first.
    """
    if other is None:
        return True
    if not isinstance(other, Timestamp):
        other = Timestamp(other)
    return self.internal != other.internal
def __cmp__(self, other):
    # Python 2 ordering hook: compare the internalized string forms,
    # which are constructed so that lexicographic order matches
    # chronological order (a greater float part always wins; equal
    # float parts order by offset).
    if not isinstance(other, Timestamp):
        other = Timestamp(other)
    return cmp(self.internal, other.internal)
def normalize_timestamp(timestamp): def normalize_timestamp(timestamp):
""" """
Format a timestamp (string or numeric) into a standardized Format a timestamp (string or numeric) into a standardized
@ -574,15 +688,15 @@ def normalize_timestamp(timestamp):
:param timestamp: unix timestamp :param timestamp: unix timestamp
:returns: normalized timestamp as a string :returns: normalized timestamp as a string
""" """
return "%016.05f" % (float(timestamp)) return Timestamp(timestamp).normal
def last_modified_date_to_timestamp(last_modified_date_str): def last_modified_date_to_timestamp(last_modified_date_str):
""" """
Convert a last modified date (liked you'd get from a container listing, Convert a last modified date (like you'd get from a container listing,
e.g. 2014-02-28T23:22:36.698390) to a float. e.g. 2014-02-28T23:22:36.698390) to a float.
""" """
return float( return Timestamp(
datetime.datetime.strptime( datetime.datetime.strptime(
last_modified_date_str, '%Y-%m-%dT%H:%M:%S.%f' last_modified_date_str, '%Y-%m-%dT%H:%M:%S.%f'
).strftime('%s.%f') ).strftime('%s.%f')
@ -1007,7 +1121,7 @@ class LogAdapter(logging.LoggerAdapter, object):
emsg = exc.__class__.__name__ emsg = exc.__class__.__name__
if hasattr(exc, 'seconds'): if hasattr(exc, 'seconds'):
emsg += ' (%ss)' % exc.seconds emsg += ' (%ss)' % exc.seconds
if isinstance(exc, MessageTimeout): if isinstance(exc, swift.common.exceptions.MessageTimeout):
if exc.msg: if exc.msg:
emsg += ' %s' % exc.msg emsg += ' %s' % exc.msg
else: else:
@ -1441,7 +1555,7 @@ def hash_path(account, container=None, object=None, raw_digest=False):
@contextmanager @contextmanager
def lock_path(directory, timeout=10, timeout_class=LockTimeout): def lock_path(directory, timeout=10, timeout_class=None):
""" """
Context manager that acquires a lock on a directory. This will block until Context manager that acquires a lock on a directory. This will block until
the lock can be acquired, or the timeout time has expired (whichever occurs the lock can be acquired, or the timeout time has expired (whichever occurs
@ -1458,6 +1572,8 @@ def lock_path(directory, timeout=10, timeout_class=LockTimeout):
constructed as timeout_class(timeout, lockpath). Default: constructed as timeout_class(timeout, lockpath). Default:
LockTimeout LockTimeout
""" """
if timeout_class is None:
timeout_class = swift.common.exceptions.LockTimeout
mkdirs(directory) mkdirs(directory)
lockpath = '%s/.lock' % directory lockpath = '%s/.lock' % directory
fd = os.open(lockpath, os.O_WRONLY | os.O_CREAT) fd = os.open(lockpath, os.O_WRONLY | os.O_CREAT)
@ -1497,7 +1613,7 @@ def lock_file(filename, timeout=10, append=False, unlink=True):
fd = os.open(filename, flags) fd = os.open(filename, flags)
file_obj = os.fdopen(fd, mode) file_obj = os.fdopen(fd, mode)
try: try:
with LockTimeout(timeout, filename): with swift.common.exceptions.LockTimeout(timeout, filename):
while True: while True:
try: try:
fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB) fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)

View File

@ -24,7 +24,7 @@ import errno
import sqlite3 import sqlite3
from swift.common.utils import normalize_timestamp, lock_parent_directory from swift.common.utils import Timestamp, lock_parent_directory
from swift.common.db import DatabaseBroker, DatabaseConnectionError, \ from swift.common.db import DatabaseBroker, DatabaseConnectionError, \
PENDING_CAP, PICKLE_PROTOCOL, utf8encode PENDING_CAP, PICKLE_PROTOCOL, utf8encode
@ -202,7 +202,7 @@ class ContainerBroker(DatabaseBroker):
:param storage_policy_index: storage policy index :param storage_policy_index: storage policy index
""" """
if put_timestamp is None: if put_timestamp is None:
put_timestamp = normalize_timestamp(0) put_timestamp = Timestamp(0).internal
# The container_stat view is for compatibility; old versions of Swift # The container_stat view is for compatibility; old versions of Swift
# expected a container_stat table with columns "object_count" and # expected a container_stat table with columns "object_count" and
# "bytes_used", but when that stuff became per-storage-policy and # "bytes_used", but when that stuff became per-storage-policy and
@ -224,7 +224,7 @@ class ContainerBroker(DatabaseBroker):
INSERT INTO container_info (account, container, created_at, id, INSERT INTO container_info (account, container, created_at, id,
put_timestamp, status_changed_at, storage_policy_index) put_timestamp, status_changed_at, storage_policy_index)
VALUES (?, ?, ?, ?, ?, ?, ?); VALUES (?, ?, ?, ?, ?, ?, ?);
""", (self.account, self.container, normalize_timestamp(time.time()), """, (self.account, self.container, Timestamp(time.time()).internal,
str(uuid4()), put_timestamp, put_timestamp, str(uuid4()), put_timestamp, put_timestamp,
storage_policy_index)) storage_policy_index))
@ -372,7 +372,7 @@ class ContainerBroker(DatabaseBroker):
# value is greater than the put_timestamp, and there are no # value is greater than the put_timestamp, and there are no
# objects in the container. # objects in the container.
return (object_count in (None, '', 0, '0')) and ( return (object_count in (None, '', 0, '0')) and (
float(delete_timestamp) > float(put_timestamp)) Timestamp(delete_timestamp) > Timestamp(put_timestamp))
def _is_deleted(self, conn): def _is_deleted(self, conn):
""" """
@ -524,7 +524,7 @@ class ContainerBroker(DatabaseBroker):
Update the container_stat policy_index and status_changed_at. Update the container_stat policy_index and status_changed_at.
""" """
if timestamp is None: if timestamp is None:
timestamp = normalize_timestamp(time.time()) timestamp = Timestamp(time.time()).internal
def _setit(conn): def _setit(conn):
conn.execute(''' conn.execute('''

View File

@ -27,7 +27,7 @@ from swift.common.direct_client import (
from swift.common.internal_client import InternalClient, UnexpectedResponse from swift.common.internal_client import InternalClient, UnexpectedResponse
from swift.common.storage_policy import POLICY_INDEX from swift.common.storage_policy import POLICY_INDEX
from swift.common.utils import get_logger, split_path, quorum_size, \ from swift.common.utils import get_logger, split_path, quorum_size, \
FileLikeIter, normalize_timestamp, last_modified_date_to_timestamp, \ FileLikeIter, Timestamp, last_modified_date_to_timestamp, \
LRUCache LRUCache
@ -68,7 +68,7 @@ def cmp_policy_info(info, remote_info):
def has_been_recreated(info): def has_been_recreated(info):
return (info['put_timestamp'] > info['delete_timestamp'] > return (info['put_timestamp'] > info['delete_timestamp'] >
normalize_timestamp(0)) Timestamp(0))
remote_recreated = has_been_recreated(remote_info) remote_recreated = has_been_recreated(remote_info)
recreated = has_been_recreated(info) recreated = has_been_recreated(info)
@ -98,7 +98,7 @@ def incorrect_policy_index(info, remote_info):
def translate_container_headers_to_info(headers): def translate_container_headers_to_info(headers):
default_timestamp = normalize_timestamp(0) default_timestamp = Timestamp(0).internal
return { return {
'storage_policy_index': int(headers[POLICY_INDEX]), 'storage_policy_index': int(headers[POLICY_INDEX]),
'put_timestamp': headers.get('x-backend-put-timestamp', 'put_timestamp': headers.get('x-backend-put-timestamp',
@ -117,7 +117,7 @@ def best_policy_index(headers):
def get_reconciler_container_name(obj_timestamp): def get_reconciler_container_name(obj_timestamp):
return str(int(float(obj_timestamp)) // return str(int(Timestamp(obj_timestamp)) //
MISPLACED_OBJECTS_CONTAINER_DIVISOR * MISPLACED_OBJECTS_CONTAINER_DIVISOR *
MISPLACED_OBJECTS_CONTAINER_DIVISOR) MISPLACED_OBJECTS_CONTAINER_DIVISOR)
@ -195,7 +195,7 @@ def add_to_reconciler_queue(container_ring, account, container, obj,
# already been popped from the queue to be reprocessed, but # already been popped from the queue to be reprocessed, but
# could potentially prevent out of order updates from making it # could potentially prevent out of order updates from making it
# into the queue # into the queue
x_timestamp = normalize_timestamp(time.time()) x_timestamp = Timestamp(time.time()).internal
else: else:
x_timestamp = obj_timestamp x_timestamp = obj_timestamp
q_op_type = get_reconciler_content_type(op) q_op_type = get_reconciler_content_type(op)
@ -230,10 +230,7 @@ def add_to_reconciler_queue(container_ring, account, container, obj,
def slightly_later_timestamp(ts, offset=1): def slightly_later_timestamp(ts, offset=1):
# I'm guessing to avoid rounding errors Swift uses a 10-microsecond return Timestamp(ts, offset=offset).internal
# resolution instead of Python's 1-microsecond resolution.
offset *= 0.00001
return normalize_timestamp(float(ts) + offset)
def parse_raw_obj(obj_info): def parse_raw_obj(obj_info):
@ -266,7 +263,7 @@ def parse_raw_obj(obj_info):
'container': container, 'container': container,
'obj': obj, 'obj': obj,
'q_op': q_op, 'q_op': q_op,
'q_ts': float(obj_info['hash']), 'q_ts': Timestamp(obj_info['hash']),
'q_record': last_modified_date_to_timestamp( 'q_record': last_modified_date_to_timestamp(
obj_info['last_modified']), obj_info['last_modified']),
'path': '/%s/%s/%s' % (account, container, obj) 'path': '/%s/%s/%s' % (account, container, obj)
@ -407,7 +404,7 @@ class ContainerReconciler(Daemon):
success = False success = False
try: try:
self.swift.delete_object(account, container, obj, self.swift.delete_object(account, container, obj,
acceptable_statuses=(2, 4), acceptable_statuses=(2, 404),
headers=headers) headers=headers)
except UnexpectedResponse as err: except UnexpectedResponse as err:
self.stats_log('cleanup_failed', '%r (%f) was not cleaned up ' self.stats_log('cleanup_failed', '%r (%f) was not cleaned up '
@ -430,7 +427,7 @@ class ContainerReconciler(Daemon):
:param obj: the object name :param obj: the object name
:param q_policy_index: the policy index of the source indicated by the :param q_policy_index: the policy index of the source indicated by the
queue entry. queue entry.
:param q_ts: a float, the timestamp of the misplaced object :param q_ts: the timestamp of the misplaced object
:param q_op: the operation of the misplaced request :param q_op: the operation of the misplaced request
:param path: the full path of the misplaced object for logging :param path: the full path of the misplaced object for logging
@ -459,12 +456,7 @@ class ContainerReconciler(Daemon):
dest_obj = self.swift.get_object_metadata(account, container, obj, dest_obj = self.swift.get_object_metadata(account, container, obj,
headers=headers, headers=headers,
acceptable_statuses=(2, 4)) acceptable_statuses=(2, 4))
dest_ts = float( dest_ts = Timestamp(dest_obj.get('x-backend-timestamp', 0))
dest_obj.get('x-timestamp',
dest_obj.get('x-backend-timestamp',
'0.0')
)
)
if dest_ts >= q_ts: if dest_ts >= q_ts:
self.stats_log('found_object', '%r (%f) in policy_index %s ' self.stats_log('found_object', '%r (%f) in policy_index %s '
'is newer than queue (%f)', path, dest_ts, 'is newer than queue (%f)', path, dest_ts,
@ -492,7 +484,7 @@ class ContainerReconciler(Daemon):
source_obj_info = {} source_obj_info = {}
source_obj_iter = None source_obj_iter = None
source_ts = float(source_obj_info.get("X-Timestamp", 0)) source_ts = Timestamp(source_obj_info.get('x-backend-timestamp', 0))
if source_obj_status == 404 and q_op == 'DELETE': if source_obj_status == 404 and q_op == 'DELETE':
return self.ensure_tombstone_in_right_location( return self.ensure_tombstone_in_right_location(
q_policy_index, account, container, obj, q_ts, path, q_policy_index, account, container, obj, q_ts, path,
@ -516,10 +508,10 @@ class ContainerReconciler(Daemon):
:param account: the account name of the misplaced object :param account: the account name of the misplaced object
:param container: the container name of the misplaced object :param container: the container name of the misplaced object
:param obj: the name of the misplaced object :param obj: the name of the misplaced object
:param q_ts: a float, the timestamp of the misplaced object :param q_ts: the timestamp of the misplaced object
:param path: the full path of the misplaced object for logging :param path: the full path of the misplaced object for logging
:param container_policy_index: the policy_index of the destination :param container_policy_index: the policy_index of the destination
:param source_ts: a float, the timestamp of the source object :param source_ts: the timestamp of the source object
:param source_obj_status: the HTTP status source object request :param source_obj_status: the HTTP status source object request
:param source_obj_info: the HTTP headers of the source object request :param source_obj_info: the HTTP headers of the source object request
:param source_obj_iter: the body iter of the source object request :param source_obj_iter: the body iter of the source object request
@ -527,21 +519,22 @@ class ContainerReconciler(Daemon):
if source_obj_status // 100 != 2 or source_ts < q_ts: if source_obj_status // 100 != 2 or source_ts < q_ts:
if q_ts < time.time() - self.reclaim_age: if q_ts < time.time() - self.reclaim_age:
# it's old and there are no tombstones or anything; give up # it's old and there are no tombstones or anything; give up
self.stats_log('lost_source', '%r (%f) was not available in ' self.stats_log('lost_source', '%r (%s) was not available in '
'policy_index %s and has expired', path, q_ts, 'policy_index %s and has expired', path,
q_policy_index, level=logging.CRITICAL) q_ts.internal, q_policy_index,
level=logging.CRITICAL)
return True return True
# the source object is unavailable or older than the queue # the source object is unavailable or older than the queue
# entry; a version that will satisfy the queue entry hopefully # entry; a version that will satisfy the queue entry hopefully
# exists somewhere in the cluster, so wait and try again # exists somewhere in the cluster, so wait and try again
self.stats_log('unavailable_source', '%r (%f) in ' self.stats_log('unavailable_source', '%r (%s) in '
'policy_index %s responded %s (%f)', path, 'policy_index %s responded %s (%s)', path,
q_ts, q_policy_index, source_obj_status, q_ts.internal, q_policy_index, source_obj_status,
source_ts, level=logging.WARNING) source_ts.internal, level=logging.WARNING)
return False return False
# optimistically move any source with a timestamp >= q_ts # optimistically move any source with a timestamp >= q_ts
ts = max(float(source_ts), q_ts) ts = max(Timestamp(source_ts), q_ts)
# move the object # move the object
put_timestamp = slightly_later_timestamp(ts, offset=2) put_timestamp = slightly_later_timestamp(ts, offset=2)
self.stats_log('copy_attempt', '%r (%f) in policy_index %s will be ' self.stats_log('copy_attempt', '%r (%f) in policy_index %s will be '
@ -638,7 +631,7 @@ class ContainerReconciler(Daemon):
success = self._reconcile_object(**info) success = self._reconcile_object(**info)
except: # noqa except: # noqa
self.logger.exception('Unhandled Exception trying to ' self.logger.exception('Unhandled Exception trying to '
'reconcile %r (%s) in policy_index %s', 'reconcile %r (%f) in policy_index %s',
info['path'], info['q_ts'], info['path'], info['q_ts'],
info['q_policy_index']) info['q_policy_index'])
if success: if success:

View File

@ -28,7 +28,7 @@ from swift.common.storage_policy import POLICIES
from swift.common.exceptions import DeviceUnavailable from swift.common.exceptions import DeviceUnavailable
from swift.common.http import is_success from swift.common.http import is_success
from swift.common.db import DatabaseAlreadyExists from swift.common.db import DatabaseAlreadyExists
from swift.common.utils import (json, normalize_timestamp, hash_path, from swift.common.utils import (json, Timestamp, hash_path,
storage_directory, quorum_size) storage_directory, quorum_size)
@ -59,10 +59,10 @@ class ContainerReplicator(db_replicator.Replicator):
if is_success(response.status): if is_success(response.status):
remote_info = json.loads(response.data) remote_info = json.loads(response.data)
if incorrect_policy_index(info, remote_info): if incorrect_policy_index(info, remote_info):
status_changed_at = normalize_timestamp(time.time()) status_changed_at = Timestamp(time.time())
broker.set_storage_policy_index( broker.set_storage_policy_index(
remote_info['storage_policy_index'], remote_info['storage_policy_index'],
timestamp=status_changed_at) timestamp=status_changed_at.internal)
broker.merge_timestamps(*(remote_info[key] for key in ( broker.merge_timestamps(*(remote_info[key] for key in (
'created_at', 'put_timestamp', 'delete_timestamp'))) 'created_at', 'put_timestamp', 'delete_timestamp')))
rv = parent._handle_sync_response( rv = parent._handle_sync_response(
@ -256,7 +256,7 @@ class ContainerReplicatorRpc(db_replicator.ReplicatorRpc):
""" """
info = broker.get_replication_info() info = broker.get_replication_info()
if incorrect_policy_index(info, remote_info): if incorrect_policy_index(info, remote_info):
status_changed_at = normalize_timestamp(time.time()) status_changed_at = Timestamp(time.time()).internal
broker.set_storage_policy_index( broker.set_storage_policy_index(
remote_info['storage_policy_index'], remote_info['storage_policy_index'],
timestamp=status_changed_at) timestamp=status_changed_at)

View File

@ -16,7 +16,6 @@
import os import os
import time import time
import traceback import traceback
from datetime import datetime
from swift import gettext_ as _ from swift import gettext_ as _
from xml.etree.cElementTree import Element, SubElement, tostring from xml.etree.cElementTree import Element, SubElement, tostring
@ -30,10 +29,10 @@ from swift.common.container_sync_realms import ContainerSyncRealms
from swift.common.request_helpers import get_param, get_listing_content_type, \ from swift.common.request_helpers import get_param, get_listing_content_type, \
split_and_validate_path, is_sys_or_user_meta split_and_validate_path, is_sys_or_user_meta
from swift.common.utils import get_logger, hash_path, public, \ from swift.common.utils import get_logger, hash_path, public, \
normalize_timestamp, storage_directory, validate_sync_to, \ Timestamp, storage_directory, validate_sync_to, \
config_true_value, json, timing_stats, replication, \ config_true_value, json, timing_stats, replication, \
override_bytes_from_content_type, get_log_line override_bytes_from_content_type, get_log_line
from swift.common.constraints import check_mount, check_float, check_utf8 from swift.common.constraints import check_mount, valid_timestamp, check_utf8
from swift.common import constraints from swift.common import constraints
from swift.common.bufferedhttp import http_connect from swift.common.bufferedhttp import http_connect
from swift.common.exceptions import ConnectionTimeout from swift.common.exceptions import ConnectionTimeout
@ -51,13 +50,13 @@ def gen_resp_headers(info, is_deleted=False):
""" """
# backend headers are always included # backend headers are always included
headers = { headers = {
'X-Backend-Timestamp': normalize_timestamp(info.get('created_at', 0)), 'X-Backend-Timestamp': Timestamp(info.get('created_at', 0)).internal,
'X-Backend-PUT-Timestamp': normalize_timestamp( 'X-Backend-PUT-Timestamp': Timestamp(info.get(
info.get('put_timestamp', 0)), 'put_timestamp', 0)).internal,
'X-Backend-DELETE-Timestamp': normalize_timestamp( 'X-Backend-DELETE-Timestamp': Timestamp(
info.get('delete_timestamp', 0)), info.get('delete_timestamp', 0)).internal,
'X-Backend-Status-Changed-At': normalize_timestamp( 'X-Backend-Status-Changed-At': Timestamp(
info.get('status_changed_at', 0)), info.get('status_changed_at', 0)).internal,
POLICY_INDEX: info.get('storage_policy_index', 0), POLICY_INDEX: info.get('storage_policy_index', 0),
} }
if not is_deleted: if not is_deleted:
@ -65,9 +64,9 @@ def gen_resp_headers(info, is_deleted=False):
headers.update({ headers.update({
'X-Container-Object-Count': info.get('object_count', 0), 'X-Container-Object-Count': info.get('object_count', 0),
'X-Container-Bytes-Used': info.get('bytes_used', 0), 'X-Container-Bytes-Used': info.get('bytes_used', 0),
'X-Timestamp': normalize_timestamp(info.get('created_at', 0)), 'X-Timestamp': Timestamp(info.get('created_at', 0)).normal,
'X-PUT-Timestamp': normalize_timestamp( 'X-PUT-Timestamp': Timestamp(
info.get('put_timestamp', 0)), info.get('put_timestamp', 0)).normal,
}) })
return headers return headers
@ -245,10 +244,7 @@ class ContainerController(object):
"""Handle HTTP DELETE request.""" """Handle HTTP DELETE request."""
drive, part, account, container, obj = split_and_validate_path( drive, part, account, container, obj = split_and_validate_path(
req, 4, 5, True) req, 4, 5, True)
if 'x-timestamp' not in req.headers or \ req_timestamp = valid_timestamp(req)
not check_float(req.headers['x-timestamp']):
return HTTPBadRequest(body='Missing timestamp', request=req,
content_type='text/plain')
if self.mount_check and not check_mount(self.root, drive): if self.mount_check and not check_mount(self.root, drive):
return HTTPInsufficientStorage(drive=drive, request=req) return HTTPInsufficientStorage(drive=drive, request=req)
# policy index is only relevant for delete_obj (and transitively for # policy index is only relevant for delete_obj (and transitively for
@ -258,10 +254,7 @@ class ContainerController(object):
if account.startswith(self.auto_create_account_prefix) and obj and \ if account.startswith(self.auto_create_account_prefix) and obj and \
not os.path.exists(broker.db_file): not os.path.exists(broker.db_file):
try: try:
broker.initialize( broker.initialize(req_timestamp.internal, obj_policy_index)
normalize_timestamp(
req.headers.get('x-timestamp') or time.time()),
obj_policy_index)
except DatabaseAlreadyExists: except DatabaseAlreadyExists:
pass pass
if not os.path.exists(broker.db_file): if not os.path.exists(broker.db_file):
@ -274,9 +267,9 @@ class ContainerController(object):
# delete container # delete container
if not broker.empty(): if not broker.empty():
return HTTPConflict(request=req) return HTTPConflict(request=req)
existed = float(broker.get_info()['put_timestamp']) and \ existed = Timestamp(broker.get_info()['put_timestamp']) and \
not broker.is_deleted() not broker.is_deleted()
broker.delete_db(req.headers['X-Timestamp']) broker.delete_db(req_timestamp.internal)
if not broker.is_deleted(): if not broker.is_deleted():
return HTTPConflict(request=req) return HTTPConflict(request=req)
resp = self.account_update(req, account, container, broker) resp = self.account_update(req, account, container, broker)
@ -298,6 +291,7 @@ class ContainerController(object):
when creating the container when creating the container
:param requested_policy_index: the storage policy index sent in the :param requested_policy_index: the storage policy index sent in the
request, may be None request, may be None
:returns: created, a bool, if database did not previously exist :returns: created, a bool, if database did not previously exist
""" """
if not os.path.exists(broker.db_file): if not os.path.exists(broker.db_file):
@ -329,10 +323,7 @@ class ContainerController(object):
"""Handle HTTP PUT request.""" """Handle HTTP PUT request."""
drive, part, account, container, obj = split_and_validate_path( drive, part, account, container, obj = split_and_validate_path(
req, 4, 5, True) req, 4, 5, True)
if 'x-timestamp' not in req.headers or \ req_timestamp = valid_timestamp(req)
not check_float(req.headers['x-timestamp']):
return HTTPBadRequest(body='Missing timestamp', request=req,
content_type='text/plain')
if 'x-container-sync-to' in req.headers: if 'x-container-sync-to' in req.headers:
err, sync_to, realm, realm_key = validate_sync_to( err, sync_to, realm, realm_key = validate_sync_to(
req.headers['x-container-sync-to'], self.allowed_sync_hosts, req.headers['x-container-sync-to'], self.allowed_sync_hosts,
@ -342,7 +333,6 @@ class ContainerController(object):
if self.mount_check and not check_mount(self.root, drive): if self.mount_check and not check_mount(self.root, drive):
return HTTPInsufficientStorage(drive=drive, request=req) return HTTPInsufficientStorage(drive=drive, request=req)
requested_policy_index = self.get_and_validate_policy_index(req) requested_policy_index = self.get_and_validate_policy_index(req)
timestamp = normalize_timestamp(req.headers['x-timestamp'])
broker = self._get_container_broker(drive, part, account, container) broker = self._get_container_broker(drive, part, account, container)
if obj: # put container object if obj: # put container object
# obj put expects the policy_index header, default is for # obj put expects the policy_index header, default is for
@ -351,12 +341,12 @@ class ContainerController(object):
if account.startswith(self.auto_create_account_prefix) and \ if account.startswith(self.auto_create_account_prefix) and \
not os.path.exists(broker.db_file): not os.path.exists(broker.db_file):
try: try:
broker.initialize(timestamp, obj_policy_index) broker.initialize(req_timestamp.internal, obj_policy_index)
except DatabaseAlreadyExists: except DatabaseAlreadyExists:
pass pass
if not os.path.exists(broker.db_file): if not os.path.exists(broker.db_file):
return HTTPNotFound() return HTTPNotFound()
broker.put_object(obj, timestamp, broker.put_object(obj, req_timestamp.internal,
int(req.headers['x-size']), int(req.headers['x-size']),
req.headers['x-content-type'], req.headers['x-content-type'],
req.headers['x-etag'], 0, req.headers['x-etag'], 0,
@ -370,12 +360,12 @@ class ContainerController(object):
else: else:
new_container_policy = requested_policy_index new_container_policy = requested_policy_index
created = self._update_or_create(req, broker, created = self._update_or_create(req, broker,
timestamp, req_timestamp.internal,
new_container_policy, new_container_policy,
requested_policy_index) requested_policy_index)
metadata = {} metadata = {}
metadata.update( metadata.update(
(key, (value, timestamp)) (key, (value, req_timestamp.internal))
for key, value in req.headers.iteritems() for key, value in req.headers.iteritems()
if key.lower() in self.save_headers or if key.lower() in self.save_headers or
is_sys_or_user_meta('container', key)) is_sys_or_user_meta('container', key))
@ -433,11 +423,7 @@ class ContainerController(object):
return {'subdir': name} return {'subdir': name}
response = {'bytes': size, 'hash': etag, 'name': name, response = {'bytes': size, 'hash': etag, 'name': name,
'content_type': content_type} 'content_type': content_type}
last_modified = datetime.utcfromtimestamp(float(created)).isoformat() response['last_modified'] = Timestamp(created).isoformat
# python isoformat() doesn't include msecs when zero
if len(last_modified) < len("1970-01-01T00:00:00.000000"):
last_modified += ".000000"
response['last_modified'] = last_modified
override_bytes_from_content_type(response, logger=self.logger) override_bytes_from_content_type(response, logger=self.logger)
return response return response
@ -541,10 +527,7 @@ class ContainerController(object):
def POST(self, req): def POST(self, req):
"""Handle HTTP POST request.""" """Handle HTTP POST request."""
drive, part, account, container = split_and_validate_path(req, 4) drive, part, account, container = split_and_validate_path(req, 4)
if 'x-timestamp' not in req.headers or \ req_timestamp = valid_timestamp(req)
not check_float(req.headers['x-timestamp']):
return HTTPBadRequest(body='Missing or bad timestamp',
request=req, content_type='text/plain')
if 'x-container-sync-to' in req.headers: if 'x-container-sync-to' in req.headers:
err, sync_to, realm, realm_key = validate_sync_to( err, sync_to, realm, realm_key = validate_sync_to(
req.headers['x-container-sync-to'], self.allowed_sync_hosts, req.headers['x-container-sync-to'], self.allowed_sync_hosts,
@ -556,10 +539,10 @@ class ContainerController(object):
broker = self._get_container_broker(drive, part, account, container) broker = self._get_container_broker(drive, part, account, container)
if broker.is_deleted(): if broker.is_deleted():
return HTTPNotFound(request=req) return HTTPNotFound(request=req)
timestamp = normalize_timestamp(req.headers['x-timestamp'])
metadata = {} metadata = {}
metadata.update( metadata.update(
(key, (value, timestamp)) for key, value in req.headers.iteritems() (key, (value, req_timestamp.internal))
for key, value in req.headers.iteritems()
if key.lower() in self.save_headers or if key.lower() in self.save_headers or
is_sys_or_user_meta('container', key)) is_sys_or_user_meta('container', key))
if metadata: if metadata:

View File

@ -32,7 +32,7 @@ from swift.common.ring import Ring
from swift.common.utils import ( from swift.common.utils import (
audit_location_generator, clean_content_type, config_true_value, audit_location_generator, clean_content_type, config_true_value,
FileLikeIter, get_logger, hash_path, quote, urlparse, validate_sync_to, FileLikeIter, get_logger, hash_path, quote, urlparse, validate_sync_to,
whataremyips) whataremyips, Timestamp)
from swift.common.daemon import Daemon from swift.common.daemon import Daemon
from swift.common.http import HTTP_UNAUTHORIZED, HTTP_NOT_FOUND from swift.common.http import HTTP_UNAUTHORIZED, HTTP_NOT_FOUND
from swift.common.storage_policy import POLICIES, POLICY_INDEX from swift.common.storage_policy import POLICIES, POLICY_INDEX
@ -373,7 +373,7 @@ class ContainerSync(Daemon):
row['name']) row['name'])
shuffle(nodes) shuffle(nodes)
exc = None exc = None
looking_for_timestamp = float(row['created_at']) looking_for_timestamp = Timestamp(row['created_at'])
timestamp = -1 timestamp = -1
headers = body = None headers = body = None
headers_out = {POLICY_INDEX: str(info['storage_policy_index'])} headers_out = {POLICY_INDEX: str(info['storage_policy_index'])}
@ -383,7 +383,8 @@ class ContainerSync(Daemon):
node, part, info['account'], info['container'], node, part, info['account'], info['container'],
row['name'], headers=headers_out, row['name'], headers=headers_out,
resp_chunk_size=65536) resp_chunk_size=65536)
this_timestamp = float(these_headers['x-timestamp']) this_timestamp = Timestamp(
these_headers['x-timestamp'])
if this_timestamp > timestamp: if this_timestamp > timestamp:
timestamp = this_timestamp timestamp = this_timestamp
headers = these_headers headers = these_headers

View File

@ -30,7 +30,7 @@ from swift.common.bufferedhttp import http_connect
from swift.common.exceptions import ConnectionTimeout from swift.common.exceptions import ConnectionTimeout
from swift.common.ring import Ring from swift.common.ring import Ring
from swift.common.utils import get_logger, config_true_value, ismount, \ from swift.common.utils import get_logger, config_true_value, ismount, \
dump_recon_cache, quorum_size dump_recon_cache, quorum_size, Timestamp
from swift.common.daemon import Daemon from swift.common.daemon import Daemon
from swift.common.http import is_success, HTTP_INTERNAL_SERVER_ERROR from swift.common.http import is_success, HTTP_INTERNAL_SERVER_ERROR
from swift.common.storage_policy import POLICY_INDEX from swift.common.storage_policy import POLICY_INDEX
@ -210,7 +210,7 @@ class ContainerUpdater(Daemon):
info = broker.get_info() info = broker.get_info()
# Don't send updates if the container was auto-created since it # Don't send updates if the container was auto-created since it
# definitely doesn't have up to date statistics. # definitely doesn't have up to date statistics.
if float(info['put_timestamp']) <= 0: if Timestamp(info['put_timestamp']) <= 0:
return return
if self.account_suppressions.get(info['account'], 0) > time.time(): if self.account_suppressions.get(info['account'], 0) > time.time():
return return

View File

@ -49,7 +49,7 @@ from eventlet import Timeout
from swift import gettext_ as _ from swift import gettext_ as _
from swift.common.constraints import check_mount from swift.common.constraints import check_mount
from swift.common.utils import mkdirs, normalize_timestamp, \ from swift.common.utils import mkdirs, Timestamp, \
storage_directory, hash_path, renamer, fallocate, fsync, \ storage_directory, hash_path, renamer, fallocate, fsync, \
fdatasync, drop_buffer_cache, ThreadPool, lock_path, write_pickle, \ fdatasync, drop_buffer_cache, ThreadPool, lock_path, write_pickle, \
config_true_value, listdir, split_path, ismount, remove_file config_true_value, listdir, split_path, ismount, remove_file
@ -228,7 +228,7 @@ def hash_cleanup_listdir(hsh_path, reclaim_age=ONE_WEEK):
if files[0].endswith('.ts'): if files[0].endswith('.ts'):
# remove tombstones older than reclaim_age # remove tombstones older than reclaim_age
ts = files[0].rsplit('.', 1)[0] ts = files[0].rsplit('.', 1)[0]
if (time.time() - float(ts)) > reclaim_age: if (time.time() - float(Timestamp(ts))) > reclaim_age:
remove_file(join(hsh_path, files[0])) remove_file(join(hsh_path, files[0]))
files.remove(files[0]) files.remove(files[0])
elif files: elif files:
@ -552,7 +552,7 @@ class DiskFileManager(object):
write_pickle, write_pickle,
data, data,
os.path.join(async_dir, ohash[-3:], ohash + '-' + os.path.join(async_dir, ohash[-3:], ohash + '-' +
normalize_timestamp(timestamp)), Timestamp(timestamp).internal),
os.path.join(device_path, get_tmp_dir(policy_idx))) os.path.join(device_path, get_tmp_dir(policy_idx)))
self.logger.increment('async_pendings') self.logger.increment('async_pendings')
@ -794,7 +794,7 @@ class DiskFileWriter(object):
:param metadata: dictionary of metadata to be associated with the :param metadata: dictionary of metadata to be associated with the
object object
""" """
timestamp = normalize_timestamp(metadata['X-Timestamp']) timestamp = Timestamp(metadata['X-Timestamp']).internal
metadata['name'] = self._name metadata['name'] = self._name
target_path = join(self._datadir, timestamp + self._extension) target_path = join(self._datadir, timestamp + self._extension)
@ -1060,7 +1060,7 @@ class DiskFile(object):
def timestamp(self): def timestamp(self):
if self._metadata is None: if self._metadata is None:
raise DiskFileNotOpen() raise DiskFileNotOpen()
return self._metadata.get('X-Timestamp') return Timestamp(self._metadata.get('X-Timestamp'))
@classmethod @classmethod
def from_hash_dir(cls, mgr, hash_dir_path, device_path, partition): def from_hash_dir(cls, mgr, hash_dir_path, device_path, partition):
@ -1449,7 +1449,7 @@ class DiskFile(object):
:raises DiskFileError: this implementation will raise the same :raises DiskFileError: this implementation will raise the same
errors as the `create()` method. errors as the `create()` method.
""" """
timestamp = normalize_timestamp(timestamp) timestamp = Timestamp(timestamp).internal
with self.create() as deleter: with self.create() as deleter:
deleter._extension = '.ts' deleter._extension = '.ts'

View File

@ -22,7 +22,7 @@ from contextlib import contextmanager
from eventlet import Timeout from eventlet import Timeout
from swift.common.utils import normalize_timestamp from swift.common.utils import Timestamp
from swift.common.exceptions import DiskFileQuarantined, DiskFileNotExist, \ from swift.common.exceptions import DiskFileQuarantined, DiskFileNotExist, \
DiskFileCollision, DiskFileDeleted, DiskFileNotOpen DiskFileCollision, DiskFileDeleted, DiskFileNotOpen
from swift.common.swob import multi_range_iterator from swift.common.swob import multi_range_iterator
@ -394,7 +394,6 @@ class DiskFile(object):
:param timestamp: timestamp to compare with each file :param timestamp: timestamp to compare with each file
""" """
timestamp = normalize_timestamp(timestamp)
fp, md = self._filesystem.get_object(self._name) fp, md = self._filesystem.get_object(self._name)
if md['X-Timestamp'] < timestamp: if md['X-Timestamp'] < Timestamp(timestamp):
self._filesystem.del_object(self._name) self._filesystem.del_object(self._name)

View File

@ -29,10 +29,10 @@ from eventlet import sleep, Timeout
from swift.common.utils import public, get_logger, \ from swift.common.utils import public, get_logger, \
config_true_value, timing_stats, replication, \ config_true_value, timing_stats, replication, \
normalize_delete_at_timestamp, get_log_line normalize_delete_at_timestamp, get_log_line, Timestamp
from swift.common.bufferedhttp import http_connect from swift.common.bufferedhttp import http_connect
from swift.common.constraints import check_object_creation, \ from swift.common.constraints import check_object_creation, \
check_float, check_utf8 valid_timestamp, check_utf8
from swift.common.exceptions import ConnectionTimeout, DiskFileQuarantined, \ from swift.common.exceptions import ConnectionTimeout, DiskFileQuarantined, \
DiskFileNotExist, DiskFileCollision, DiskFileNoSpace, DiskFileDeleted, \ DiskFileNotExist, DiskFileCollision, DiskFileNoSpace, DiskFileDeleted, \
DiskFileDeviceUnavailable, DiskFileExpired, ChunkReadTimeout DiskFileDeviceUnavailable, DiskFileExpired, ChunkReadTimeout
@ -271,7 +271,7 @@ class ObjectController(object):
headers_in = request.headers headers_in = request.headers
headers_out = HeaderKeyDict({ headers_out = HeaderKeyDict({
POLICY_INDEX: 0, # system accounts are always Policy-0 POLICY_INDEX: 0, # system accounts are always Policy-0
'x-timestamp': headers_in['x-timestamp'], 'x-timestamp': request.timestamp.internal,
'x-trans-id': headers_in.get('x-trans-id', '-'), 'x-trans-id': headers_in.get('x-trans-id', '-'),
'referer': request.as_referer()}) 'referer': request.as_referer()})
if op != 'DELETE': if op != 'DELETE':
@ -325,10 +325,7 @@ class ObjectController(object):
"""Handle HTTP POST requests for the Swift Object Server.""" """Handle HTTP POST requests for the Swift Object Server."""
device, partition, account, container, obj, policy_idx = \ device, partition, account, container, obj, policy_idx = \
get_name_and_placement(request, 5, 5, True) get_name_and_placement(request, 5, 5, True)
if 'x-timestamp' not in request.headers or \ req_timestamp = valid_timestamp(request)
not check_float(request.headers['x-timestamp']):
return HTTPBadRequest(body='Missing timestamp', request=request,
content_type='text/plain')
new_delete_at = int(request.headers.get('X-Delete-At') or 0) new_delete_at = int(request.headers.get('X-Delete-At') or 0)
if new_delete_at and new_delete_at < time.time(): if new_delete_at and new_delete_at < time.time():
return HTTPBadRequest(body='X-Delete-At in past', request=request, return HTTPBadRequest(body='X-Delete-At in past', request=request,
@ -343,10 +340,10 @@ class ObjectController(object):
orig_metadata = disk_file.read_metadata() orig_metadata = disk_file.read_metadata()
except (DiskFileNotExist, DiskFileQuarantined): except (DiskFileNotExist, DiskFileQuarantined):
return HTTPNotFound(request=request) return HTTPNotFound(request=request)
orig_timestamp = orig_metadata.get('X-Timestamp', '0') orig_timestamp = Timestamp(orig_metadata.get('X-Timestamp', 0))
if orig_timestamp >= request.headers['x-timestamp']: if orig_timestamp >= req_timestamp:
return HTTPConflict(request=request) return HTTPConflict(request=request)
metadata = {'X-Timestamp': request.headers['x-timestamp']} metadata = {'X-Timestamp': req_timestamp.internal}
metadata.update(val for val in request.headers.iteritems() metadata.update(val for val in request.headers.iteritems()
if is_user_meta('object', val[0])) if is_user_meta('object', val[0]))
for header_key in self.allowed_headers: for header_key in self.allowed_headers:
@ -371,10 +368,7 @@ class ObjectController(object):
"""Handle HTTP PUT requests for the Swift Object Server.""" """Handle HTTP PUT requests for the Swift Object Server."""
device, partition, account, container, obj, policy_idx = \ device, partition, account, container, obj, policy_idx = \
get_name_and_placement(request, 5, 5, True) get_name_and_placement(request, 5, 5, True)
if 'x-timestamp' not in request.headers or \ req_timestamp = valid_timestamp(request)
not check_float(request.headers['x-timestamp']):
return HTTPBadRequest(body='Missing timestamp', request=request,
content_type='text/plain')
error_response = check_object_creation(request, obj) error_response = check_object_creation(request, obj)
if error_response: if error_response:
return error_response return error_response
@ -407,8 +401,8 @@ class ObjectController(object):
# The current ETag matches, so return 412 # The current ETag matches, so return 412
return HTTPPreconditionFailed(request=request) return HTTPPreconditionFailed(request=request)
orig_timestamp = orig_metadata.get('X-Timestamp') orig_timestamp = Timestamp(orig_metadata.get('X-Timestamp', 0))
if orig_timestamp and orig_timestamp >= request.headers['x-timestamp']: if orig_timestamp and orig_timestamp >= req_timestamp:
return HTTPConflict(request=request) return HTTPConflict(request=request)
orig_delete_at = int(orig_metadata.get('X-Delete-At') or 0) orig_delete_at = int(orig_metadata.get('X-Delete-At') or 0)
upload_expiration = time.time() + self.max_upload_time upload_expiration = time.time() + self.max_upload_time
@ -445,7 +439,7 @@ class ObjectController(object):
request.headers['etag'].lower() != etag: request.headers['etag'].lower() != etag:
return HTTPUnprocessableEntity(request=request) return HTTPUnprocessableEntity(request=request)
metadata = { metadata = {
'X-Timestamp': request.headers['x-timestamp'], 'X-Timestamp': request.timestamp.internal,
'Content-Type': request.headers['content-type'], 'Content-Type': request.headers['content-type'],
'ETag': etag, 'ETag': etag,
'Content-Length': str(upload_size), 'Content-Length': str(upload_size),
@ -499,8 +493,7 @@ class ObjectController(object):
with disk_file.open(): with disk_file.open():
metadata = disk_file.get_metadata() metadata = disk_file.get_metadata()
obj_size = int(metadata['Content-Length']) obj_size = int(metadata['Content-Length'])
file_x_ts = metadata['X-Timestamp'] file_x_ts = Timestamp(metadata['X-Timestamp'])
file_x_ts_flt = float(file_x_ts)
keep_cache = (self.keep_cache_private or keep_cache = (self.keep_cache_private or
('X-Auth-Token' not in request.headers and ('X-Auth-Token' not in request.headers and
'X-Storage-Token' not in request.headers)) 'X-Storage-Token' not in request.headers))
@ -514,19 +507,20 @@ class ObjectController(object):
key.lower() in self.allowed_headers: key.lower() in self.allowed_headers:
response.headers[key] = value response.headers[key] = value
response.etag = metadata['ETag'] response.etag = metadata['ETag']
response.last_modified = math.ceil(file_x_ts_flt) response.last_modified = math.ceil(float(file_x_ts))
response.content_length = obj_size response.content_length = obj_size
try: try:
response.content_encoding = metadata[ response.content_encoding = metadata[
'Content-Encoding'] 'Content-Encoding']
except KeyError: except KeyError:
pass pass
response.headers['X-Timestamp'] = file_x_ts response.headers['X-Timestamp'] = file_x_ts.normal
response.headers['X-Backend-Timestamp'] = file_x_ts.internal
resp = request.get_response(response) resp = request.get_response(response)
except (DiskFileNotExist, DiskFileQuarantined) as e: except (DiskFileNotExist, DiskFileQuarantined) as e:
headers = {} headers = {}
if hasattr(e, 'timestamp'): if hasattr(e, 'timestamp'):
headers['X-Backend-Timestamp'] = e.timestamp headers['X-Backend-Timestamp'] = e.timestamp.internal
resp = HTTPNotFound(request=request, headers=headers, resp = HTTPNotFound(request=request, headers=headers,
conditional_response=True) conditional_response=True)
return resp return resp
@ -548,7 +542,7 @@ class ObjectController(object):
except (DiskFileNotExist, DiskFileQuarantined) as e: except (DiskFileNotExist, DiskFileQuarantined) as e:
headers = {} headers = {}
if hasattr(e, 'timestamp'): if hasattr(e, 'timestamp'):
headers['X-Backend-Timestamp'] = e.timestamp headers['X-Backend-Timestamp'] = e.timestamp.internal
return HTTPNotFound(request=request, headers=headers, return HTTPNotFound(request=request, headers=headers,
conditional_response=True) conditional_response=True)
response = Response(request=request, conditional_response=True) response = Response(request=request, conditional_response=True)
@ -559,10 +553,11 @@ class ObjectController(object):
key.lower() in self.allowed_headers: key.lower() in self.allowed_headers:
response.headers[key] = value response.headers[key] = value
response.etag = metadata['ETag'] response.etag = metadata['ETag']
ts = metadata['X-Timestamp'] ts = Timestamp(metadata['X-Timestamp'])
response.last_modified = math.ceil(float(ts)) response.last_modified = math.ceil(float(ts))
# Needed for container sync feature # Needed for container sync feature
response.headers['X-Timestamp'] = ts response.headers['X-Timestamp'] = ts.normal
response.headers['X-Backend-Timestamp'] = ts.internal
response.content_length = int(metadata['Content-Length']) response.content_length = int(metadata['Content-Length'])
try: try:
response.content_encoding = metadata['Content-Encoding'] response.content_encoding = metadata['Content-Encoding']
@ -576,10 +571,7 @@ class ObjectController(object):
"""Handle HTTP DELETE requests for the Swift Object Server.""" """Handle HTTP DELETE requests for the Swift Object Server."""
device, partition, account, container, obj, policy_idx = \ device, partition, account, container, obj, policy_idx = \
get_name_and_placement(request, 5, 5, True) get_name_and_placement(request, 5, 5, True)
if 'x-timestamp' not in request.headers or \ req_timestamp = valid_timestamp(request)
not check_float(request.headers['x-timestamp']):
return HTTPBadRequest(body='Missing timestamp', request=request,
content_type='text/plain')
try: try:
disk_file = self.get_diskfile( disk_file = self.get_diskfile(
device, partition, account, container, obj, device, partition, account, container, obj,
@ -601,8 +593,8 @@ class ObjectController(object):
orig_metadata = {} orig_metadata = {}
response_class = HTTPNotFound response_class = HTTPNotFound
else: else:
orig_timestamp = orig_metadata.get('X-Timestamp', 0) orig_timestamp = Timestamp(orig_metadata.get('X-Timestamp', 0))
if orig_timestamp < request.headers['x-timestamp']: if orig_timestamp < req_timestamp:
response_class = HTTPNoContent response_class = HTTPNoContent
else: else:
response_class = HTTPConflict response_class = HTTPConflict
@ -633,12 +625,11 @@ class ObjectController(object):
self.delete_at_update('DELETE', orig_delete_at, account, self.delete_at_update('DELETE', orig_delete_at, account,
container, obj, request, device, container, obj, request, device,
policy_idx) policy_idx)
req_timestamp = request.headers['X-Timestamp']
if orig_timestamp < req_timestamp: if orig_timestamp < req_timestamp:
disk_file.delete(req_timestamp) disk_file.delete(req_timestamp)
self.container_update( self.container_update(
'DELETE', account, container, obj, request, 'DELETE', account, container, obj, request,
HeaderKeyDict({'x-timestamp': req_timestamp}), HeaderKeyDict({'x-timestamp': req_timestamp.internal}),
device, policy_idx) device, policy_idx)
return response_class(request=request) return response_class(request=request)

View File

@ -36,7 +36,7 @@ from eventlet import sleep
from eventlet.timeout import Timeout from eventlet.timeout import Timeout
from swift.common.wsgi import make_pre_authed_env from swift.common.wsgi import make_pre_authed_env
from swift.common.utils import normalize_timestamp, config_true_value, \ from swift.common.utils import Timestamp, config_true_value, \
public, split_path, list_from_csv, GreenthreadSafeIterator, \ public, split_path, list_from_csv, GreenthreadSafeIterator, \
quorum_size, GreenAsyncPile quorum_size, GreenAsyncPile
from swift.common.bufferedhttp import http_connect from swift.common.bufferedhttp import http_connect
@ -926,7 +926,7 @@ class Controller(object):
headers = HeaderKeyDict(additional) if additional else HeaderKeyDict() headers = HeaderKeyDict(additional) if additional else HeaderKeyDict()
if transfer: if transfer:
self.transfer_headers(orig_req.headers, headers) self.transfer_headers(orig_req.headers, headers)
headers.setdefault('x-timestamp', normalize_timestamp(time.time())) headers.setdefault('x-timestamp', Timestamp(time.time()).internal)
if orig_req: if orig_req:
referer = orig_req.as_referer() referer = orig_req.as_referer()
else: else:
@ -1158,7 +1158,7 @@ class Controller(object):
""" """
partition, nodes = self.app.account_ring.get_nodes(account) partition, nodes = self.app.account_ring.get_nodes(account)
path = '/%s' % account path = '/%s' % account
headers = {'X-Timestamp': normalize_timestamp(time.time()), headers = {'X-Timestamp': Timestamp(time.time()).internal,
'X-Trans-Id': self.trans_id, 'X-Trans-Id': self.trans_id,
'Connection': 'close'} 'Connection': 'close'}
resp = self.make_requests(Request.blank('/v1' + path), resp = self.make_requests(Request.blank('/v1' + path),

View File

@ -17,7 +17,7 @@ from swift import gettext_ as _
from urllib import unquote from urllib import unquote
import time import time
from swift.common.utils import public, csv_append, normalize_timestamp from swift.common.utils import public, csv_append, Timestamp
from swift.common.constraints import check_metadata from swift.common.constraints import check_metadata
from swift.common import constraints from swift.common import constraints
from swift.common.http import HTTP_ACCEPTED from swift.common.http import HTTP_ACCEPTED
@ -209,7 +209,7 @@ class ContainerController(Controller):
def _backend_requests(self, req, n_outgoing, account_partition, accounts, def _backend_requests(self, req, n_outgoing, account_partition, accounts,
policy_index=None): policy_index=None):
additional = {'X-Timestamp': normalize_timestamp(time.time())} additional = {'X-Timestamp': Timestamp(time.time()).internal}
if policy_index is None: if policy_index is None:
additional['X-Backend-Storage-Policy-Default'] = \ additional['X-Backend-Storage-Policy-Default'] = \
int(POLICIES.default) int(POLICIES.default)

View File

@ -37,8 +37,8 @@ from eventlet.timeout import Timeout
from swift.common.utils import ( from swift.common.utils import (
clean_content_type, config_true_value, ContextPool, csv_append, clean_content_type, config_true_value, ContextPool, csv_append,
GreenAsyncPile, GreenthreadSafeIterator, json, GreenAsyncPile, GreenthreadSafeIterator, json, Timestamp,
normalize_delete_at_timestamp, normalize_timestamp, public, quorum_size) normalize_delete_at_timestamp, public, quorum_size)
from swift.common.bufferedhttp import http_connect from swift.common.bufferedhttp import http_connect
from swift.common.constraints import check_metadata, check_object_creation, \ from swift.common.constraints import check_metadata, check_object_creation, \
check_copy_from_header check_copy_from_header
@ -309,7 +309,7 @@ class ObjectController(Controller):
req.headers[POLICY_INDEX] = policy_index req.headers[POLICY_INDEX] = policy_index
partition, nodes = obj_ring.get_nodes( partition, nodes = obj_ring.get_nodes(
self.account_name, self.container_name, self.object_name) self.account_name, self.container_name, self.object_name)
req.headers['X-Timestamp'] = normalize_timestamp(time.time()) req.headers['X-Timestamp'] = Timestamp(time.time()).internal
headers = self._backend_requests( headers = self._backend_requests(
req, len(nodes), container_partition, containers, req, len(nodes), container_partition, containers,
@ -510,19 +510,18 @@ class ObjectController(Controller):
# Used by container sync feature # Used by container sync feature
if 'x-timestamp' in req.headers: if 'x-timestamp' in req.headers:
try: try:
req.headers['X-Timestamp'] = \ req_timestamp = Timestamp(req.headers['X-Timestamp'])
normalize_timestamp(req.headers['x-timestamp'])
if hresp.environ and 'swift_x_timestamp' in hresp.environ and \ if hresp.environ and 'swift_x_timestamp' in hresp.environ and \
float(hresp.environ['swift_x_timestamp']) >= \ hresp.environ['swift_x_timestamp'] >= req_timestamp:
float(req.headers['x-timestamp']):
return HTTPAccepted(request=req) return HTTPAccepted(request=req)
except ValueError: except ValueError:
return HTTPBadRequest( return HTTPBadRequest(
request=req, content_type='text/plain', request=req, content_type='text/plain',
body='X-Timestamp should be a UNIX timestamp float value; ' body='X-Timestamp should be a UNIX timestamp float value; '
'was %r' % req.headers['x-timestamp']) 'was %r' % req.headers['x-timestamp'])
req.headers['X-Timestamp'] = req_timestamp.internal
else: else:
req.headers['X-Timestamp'] = normalize_timestamp(time.time()) req.headers['X-Timestamp'] = Timestamp(time.time()).internal
# Sometimes the 'content-type' header exists, but is set to None. # Sometimes the 'content-type' header exists, but is set to None.
content_type_manually_set = True content_type_manually_set = True
detect_content_type = \ detect_content_type = \
@ -554,7 +553,7 @@ class ObjectController(Controller):
ts_source = time.mktime(time.strptime( ts_source = time.mktime(time.strptime(
hresp.headers['last-modified'], hresp.headers['last-modified'],
'%a, %d %b %Y %H:%M:%S GMT')) '%a, %d %b %Y %H:%M:%S GMT'))
new_ts = normalize_timestamp(ts_source) new_ts = Timestamp(ts_source).internal
vers_obj_name = lprefix + new_ts vers_obj_name = lprefix + new_ts
copy_headers = { copy_headers = {
'Destination': '%s/%s' % (lcontainer, vers_obj_name)} 'Destination': '%s/%s' % (lcontainer, vers_obj_name)}
@ -766,7 +765,8 @@ class ObjectController(Controller):
resp.headers['X-Copied-From-Last-Modified'] = \ resp.headers['X-Copied-From-Last-Modified'] = \
source_resp.headers['last-modified'] source_resp.headers['last-modified']
copy_headers_into(req, resp) copy_headers_into(req, resp)
resp.last_modified = math.ceil(float(req.headers['X-Timestamp'])) resp.last_modified = math.ceil(
float(Timestamp(req.headers['X-Timestamp'])))
return resp return resp
@public @public
@ -858,15 +858,15 @@ class ObjectController(Controller):
# Used by container sync feature # Used by container sync feature
if 'x-timestamp' in req.headers: if 'x-timestamp' in req.headers:
try: try:
req.headers['X-Timestamp'] = \ req_timestamp = Timestamp(req.headers['X-Timestamp'])
normalize_timestamp(req.headers['x-timestamp'])
except ValueError: except ValueError:
return HTTPBadRequest( return HTTPBadRequest(
request=req, content_type='text/plain', request=req, content_type='text/plain',
body='X-Timestamp should be a UNIX timestamp float value; ' body='X-Timestamp should be a UNIX timestamp float value; '
'was %r' % req.headers['x-timestamp']) 'was %r' % req.headers['x-timestamp'])
req.headers['X-Timestamp'] = req_timestamp.internal
else: else:
req.headers['X-Timestamp'] = normalize_timestamp(time.time()) req.headers['X-Timestamp'] = Timestamp(time.time()).internal
headers = self._backend_requests( headers = self._backend_requests(
req, len(nodes), container_partition, containers) req, len(nodes), container_partition, containers)

View File

@ -21,6 +21,7 @@ from nose import SkipTest
from swift.common.internal_client import InternalClient from swift.common.internal_client import InternalClient
from swift.common.manager import Manager from swift.common.manager import Manager
from swift.common.storage_policy import POLICIES from swift.common.storage_policy import POLICIES
from swift.common.utils import Timestamp
from test.probe.common import reset_environment, get_to_final_state from test.probe.common import reset_environment, get_to_final_state
from test.probe.test_container_merge_policy_index import BrainSplitter from test.probe.test_container_merge_policy_index import BrainSplitter
@ -60,12 +61,12 @@ class TestObjectExpirer(unittest.TestCase):
# create an expiring object and a container with the wrong policy # create an expiring object and a container with the wrong policy
self.brain.stop_primary_half() self.brain.stop_primary_half()
self.brain.put_container(int(old_policy)) self.brain.put_container(int(old_policy))
self.brain.put_object(headers={'X-Delete-After': 1}) self.brain.put_object(headers={'X-Delete-After': 2})
# get the object timestamp # get the object timestamp
metadata = self.client.get_object_metadata( metadata = self.client.get_object_metadata(
self.account, self.container_name, self.object_name, self.account, self.container_name, self.object_name,
headers={'X-Backend-Storage-Policy-Index': int(old_policy)}) headers={'X-Backend-Storage-Policy-Index': int(old_policy)})
create_timestamp = metadata['x-timestamp'] create_timestamp = Timestamp(metadata['x-timestamp'])
self.brain.start_primary_half() self.brain.start_primary_half()
# get the expiring object updates in their queue, while we have all # get the expiring object updates in their queue, while we have all
# the servers up # the servers up
@ -89,7 +90,7 @@ class TestObjectExpirer(unittest.TestCase):
acceptable_statuses=(4,), acceptable_statuses=(4,),
headers={'X-Backend-Storage-Policy-Index': int(old_policy)}) headers={'X-Backend-Storage-Policy-Index': int(old_policy)})
self.assert_('x-backend-timestamp' in metadata) self.assert_('x-backend-timestamp' in metadata)
self.assertEqual(metadata['x-backend-timestamp'], self.assertEqual(Timestamp(metadata['x-backend-timestamp']),
create_timestamp) create_timestamp)
# but it is still in the listing # but it is still in the listing
@ -124,8 +125,8 @@ class TestObjectExpirer(unittest.TestCase):
(found_in_policy, policy)) (found_in_policy, policy))
found_in_policy = policy found_in_policy = policy
self.assert_('x-backend-timestamp' in metadata) self.assert_('x-backend-timestamp' in metadata)
self.assert_(float(metadata['x-backend-timestamp']) > self.assert_(Timestamp(metadata['x-backend-timestamp']) >
float(create_timestamp)) create_timestamp)
if __name__ == "__main__": if __name__ == "__main__":
unittest.main() unittest.main()

View File

@ -29,7 +29,7 @@ from contextlib import contextmanager
import random import random
from swift.account.backend import AccountBroker from swift.account.backend import AccountBroker
from swift.common.utils import normalize_timestamp from swift.common.utils import Timestamp
from test.unit import patch_policies, with_tempdir from test.unit import patch_policies, with_tempdir
from swift.common.db import DatabaseConnectionError from swift.common.db import DatabaseConnectionError
from swift.common.storage_policy import StoragePolicy, POLICIES from swift.common.storage_policy import StoragePolicy, POLICIES
@ -57,7 +57,7 @@ class TestAccountBroker(unittest.TestCase):
self.fail("Unexpected exception raised: %r" % e) self.fail("Unexpected exception raised: %r" % e)
else: else:
self.fail("Expected a DatabaseConnectionError exception") self.fail("Expected a DatabaseConnectionError exception")
broker.initialize(normalize_timestamp('1')) broker.initialize(Timestamp('1').internal)
with broker.get() as conn: with broker.get() as conn:
curs = conn.cursor() curs = conn.cursor()
curs.execute('SELECT 1') curs.execute('SELECT 1')
@ -67,7 +67,7 @@ class TestAccountBroker(unittest.TestCase):
# Test AccountBroker throwing a conn away after exception # Test AccountBroker throwing a conn away after exception
first_conn = None first_conn = None
broker = AccountBroker(':memory:', account='a') broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1')) broker.initialize(Timestamp('1').internal)
with broker.get() as conn: with broker.get() as conn:
first_conn = conn first_conn = conn
try: try:
@ -81,20 +81,20 @@ class TestAccountBroker(unittest.TestCase):
def test_empty(self): def test_empty(self):
# Test AccountBroker.empty # Test AccountBroker.empty
broker = AccountBroker(':memory:', account='a') broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1')) broker.initialize(Timestamp('1').internal)
self.assert_(broker.empty()) self.assert_(broker.empty())
broker.put_container('o', normalize_timestamp(time()), 0, 0, 0, broker.put_container('o', Timestamp(time()).internal, 0, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
self.assert_(not broker.empty()) self.assert_(not broker.empty())
sleep(.00001) sleep(.00001)
broker.put_container('o', 0, normalize_timestamp(time()), 0, 0, broker.put_container('o', 0, Timestamp(time()).internal, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
self.assert_(broker.empty()) self.assert_(broker.empty())
def test_reclaim(self): def test_reclaim(self):
broker = AccountBroker(':memory:', account='test_account') broker = AccountBroker(':memory:', account='test_account')
broker.initialize(normalize_timestamp('1')) broker.initialize(Timestamp('1').internal)
broker.put_container('c', normalize_timestamp(time()), 0, 0, 0, broker.put_container('c', Timestamp(time()).internal, 0, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
with broker.get() as conn: with broker.get() as conn:
self.assertEqual(conn.execute( self.assertEqual(conn.execute(
@ -103,7 +103,7 @@ class TestAccountBroker(unittest.TestCase):
self.assertEqual(conn.execute( self.assertEqual(conn.execute(
"SELECT count(*) FROM container " "SELECT count(*) FROM container "
"WHERE deleted = 1").fetchone()[0], 0) "WHERE deleted = 1").fetchone()[0], 0)
broker.reclaim(normalize_timestamp(time() - 999), time()) broker.reclaim(Timestamp(time() - 999).internal, time())
with broker.get() as conn: with broker.get() as conn:
self.assertEqual(conn.execute( self.assertEqual(conn.execute(
"SELECT count(*) FROM container " "SELECT count(*) FROM container "
@ -112,7 +112,7 @@ class TestAccountBroker(unittest.TestCase):
"SELECT count(*) FROM container " "SELECT count(*) FROM container "
"WHERE deleted = 1").fetchone()[0], 0) "WHERE deleted = 1").fetchone()[0], 0)
sleep(.00001) sleep(.00001)
broker.put_container('c', 0, normalize_timestamp(time()), 0, 0, broker.put_container('c', 0, Timestamp(time()).internal, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
with broker.get() as conn: with broker.get() as conn:
self.assertEqual(conn.execute( self.assertEqual(conn.execute(
@ -121,7 +121,7 @@ class TestAccountBroker(unittest.TestCase):
self.assertEqual(conn.execute( self.assertEqual(conn.execute(
"SELECT count(*) FROM container " "SELECT count(*) FROM container "
"WHERE deleted = 1").fetchone()[0], 1) "WHERE deleted = 1").fetchone()[0], 1)
broker.reclaim(normalize_timestamp(time() - 999), time()) broker.reclaim(Timestamp(time() - 999).internal, time())
with broker.get() as conn: with broker.get() as conn:
self.assertEqual(conn.execute( self.assertEqual(conn.execute(
"SELECT count(*) FROM container " "SELECT count(*) FROM container "
@ -130,7 +130,7 @@ class TestAccountBroker(unittest.TestCase):
"SELECT count(*) FROM container " "SELECT count(*) FROM container "
"WHERE deleted = 1").fetchone()[0], 1) "WHERE deleted = 1").fetchone()[0], 1)
sleep(.00001) sleep(.00001)
broker.reclaim(normalize_timestamp(time()), time()) broker.reclaim(Timestamp(time()).internal, time())
with broker.get() as conn: with broker.get() as conn:
self.assertEqual(conn.execute( self.assertEqual(conn.execute(
"SELECT count(*) FROM container " "SELECT count(*) FROM container "
@ -142,15 +142,15 @@ class TestAccountBroker(unittest.TestCase):
broker.put_container('x', 0, 0, 0, 0, POLICIES.default.idx) broker.put_container('x', 0, 0, 0, 0, POLICIES.default.idx)
broker.put_container('y', 0, 0, 0, 0, POLICIES.default.idx) broker.put_container('y', 0, 0, 0, 0, POLICIES.default.idx)
broker.put_container('z', 0, 0, 0, 0, POLICIES.default.idx) broker.put_container('z', 0, 0, 0, 0, POLICIES.default.idx)
broker.reclaim(normalize_timestamp(time()), time()) broker.reclaim(Timestamp(time()).internal, time())
# self.assertEqual(len(res), 2) # self.assertEqual(len(res), 2)
# self.assert_(isinstance(res, tuple)) # self.assert_(isinstance(res, tuple))
# containers, account_name = res # containers, account_name = res
# self.assert_(containers is None) # self.assert_(containers is None)
# self.assert_(account_name is None) # self.assert_(account_name is None)
# Now delete the account # Now delete the account
broker.delete_db(normalize_timestamp(time())) broker.delete_db(Timestamp(time()).internal)
broker.reclaim(normalize_timestamp(time()), time()) broker.reclaim(Timestamp(time()).internal, time())
# self.assertEqual(len(res), 2) # self.assertEqual(len(res), 2)
# self.assert_(isinstance(res, tuple)) # self.assert_(isinstance(res, tuple))
# containers, account_name = res # containers, account_name = res
@ -162,34 +162,34 @@ class TestAccountBroker(unittest.TestCase):
# self.assert_('a' not in containers) # self.assert_('a' not in containers)
def test_delete_db_status(self): def test_delete_db_status(self):
start = int(time()) ts = (Timestamp(t).internal for t in itertools.count(int(time())))
ts = itertools.count(start) start = ts.next()
broker = AccountBroker(':memory:', account='a') broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp(ts.next())) broker.initialize(start)
info = broker.get_info() info = broker.get_info()
self.assertEqual(info['put_timestamp'], normalize_timestamp(start)) self.assertEqual(info['put_timestamp'], Timestamp(start).internal)
self.assert_(float(info['created_at']) >= start) self.assert_(Timestamp(info['created_at']) >= start)
self.assertEqual(info['delete_timestamp'], '0') self.assertEqual(info['delete_timestamp'], '0')
if self.__class__ == TestAccountBrokerBeforeMetadata: if self.__class__ == TestAccountBrokerBeforeMetadata:
self.assertEqual(info['status_changed_at'], '0') self.assertEqual(info['status_changed_at'], '0')
else: else:
self.assertEqual(info['status_changed_at'], self.assertEqual(info['status_changed_at'],
normalize_timestamp(start)) Timestamp(start).internal)
# delete it # delete it
delete_timestamp = normalize_timestamp(ts.next()) delete_timestamp = ts.next()
broker.delete_db(delete_timestamp) broker.delete_db(delete_timestamp)
info = broker.get_info() info = broker.get_info()
self.assertEqual(info['put_timestamp'], normalize_timestamp(start)) self.assertEqual(info['put_timestamp'], Timestamp(start).internal)
self.assert_(float(info['created_at']) >= start) self.assert_(Timestamp(info['created_at']) >= start)
self.assertEqual(info['delete_timestamp'], delete_timestamp) self.assertEqual(info['delete_timestamp'], delete_timestamp)
self.assertEqual(info['status_changed_at'], delete_timestamp) self.assertEqual(info['status_changed_at'], delete_timestamp)
def test_delete_container(self): def test_delete_container(self):
# Test AccountBroker.delete_container # Test AccountBroker.delete_container
broker = AccountBroker(':memory:', account='a') broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1')) broker.initialize(Timestamp('1').internal)
broker.put_container('o', normalize_timestamp(time()), 0, 0, 0, broker.put_container('o', Timestamp(time()).internal, 0, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
with broker.get() as conn: with broker.get() as conn:
self.assertEqual(conn.execute( self.assertEqual(conn.execute(
@ -199,7 +199,7 @@ class TestAccountBroker(unittest.TestCase):
"SELECT count(*) FROM container " "SELECT count(*) FROM container "
"WHERE deleted = 1").fetchone()[0], 0) "WHERE deleted = 1").fetchone()[0], 0)
sleep(.00001) sleep(.00001)
broker.put_container('o', 0, normalize_timestamp(time()), 0, 0, broker.put_container('o', 0, Timestamp(time()).internal, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
with broker.get() as conn: with broker.get() as conn:
self.assertEqual(conn.execute( self.assertEqual(conn.execute(
@ -212,10 +212,10 @@ class TestAccountBroker(unittest.TestCase):
def test_put_container(self): def test_put_container(self):
# Test AccountBroker.put_container # Test AccountBroker.put_container
broker = AccountBroker(':memory:', account='a') broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1')) broker.initialize(Timestamp('1').internal)
# Create initial container # Create initial container
timestamp = normalize_timestamp(time()) timestamp = Timestamp(time()).internal
broker.put_container('"{<container \'&\' name>}"', timestamp, 0, 0, 0, broker.put_container('"{<container \'&\' name>}"', timestamp, 0, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
with broker.get() as conn: with broker.get() as conn:
@ -243,7 +243,7 @@ class TestAccountBroker(unittest.TestCase):
# Put new event # Put new event
sleep(.00001) sleep(.00001)
timestamp = normalize_timestamp(time()) timestamp = Timestamp(time()).internal
broker.put_container('"{<container \'&\' name>}"', timestamp, 0, 0, 0, broker.put_container('"{<container \'&\' name>}"', timestamp, 0, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
with broker.get() as conn: with broker.get() as conn:
@ -257,7 +257,7 @@ class TestAccountBroker(unittest.TestCase):
"SELECT deleted FROM container").fetchone()[0], 0) "SELECT deleted FROM container").fetchone()[0], 0)
# Put old event # Put old event
otimestamp = normalize_timestamp(float(timestamp) - 1) otimestamp = Timestamp(float(Timestamp(timestamp)) - 1).internal
broker.put_container('"{<container \'&\' name>}"', otimestamp, 0, 0, 0, broker.put_container('"{<container \'&\' name>}"', otimestamp, 0, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
with broker.get() as conn: with broker.get() as conn:
@ -271,7 +271,7 @@ class TestAccountBroker(unittest.TestCase):
"SELECT deleted FROM container").fetchone()[0], 0) "SELECT deleted FROM container").fetchone()[0], 0)
# Put old delete event # Put old delete event
dtimestamp = normalize_timestamp(float(timestamp) - 1) dtimestamp = Timestamp(float(Timestamp(timestamp)) - 1).internal
broker.put_container('"{<container \'&\' name>}"', 0, dtimestamp, 0, 0, broker.put_container('"{<container \'&\' name>}"', 0, dtimestamp, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
with broker.get() as conn: with broker.get() as conn:
@ -289,7 +289,7 @@ class TestAccountBroker(unittest.TestCase):
# Put new delete event # Put new delete event
sleep(.00001) sleep(.00001)
timestamp = normalize_timestamp(time()) timestamp = Timestamp(time()).internal
broker.put_container('"{<container \'&\' name>}"', 0, timestamp, 0, 0, broker.put_container('"{<container \'&\' name>}"', 0, timestamp, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
with broker.get() as conn: with broker.get() as conn:
@ -304,7 +304,7 @@ class TestAccountBroker(unittest.TestCase):
# Put new event # Put new event
sleep(.00001) sleep(.00001)
timestamp = normalize_timestamp(time()) timestamp = Timestamp(time()).internal
broker.put_container('"{<container \'&\' name>}"', timestamp, 0, 0, 0, broker.put_container('"{<container \'&\' name>}"', timestamp, 0, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
with broker.get() as conn: with broker.get() as conn:
@ -320,46 +320,46 @@ class TestAccountBroker(unittest.TestCase):
def test_get_info(self): def test_get_info(self):
# Test AccountBroker.get_info # Test AccountBroker.get_info
broker = AccountBroker(':memory:', account='test1') broker = AccountBroker(':memory:', account='test1')
broker.initialize(normalize_timestamp('1')) broker.initialize(Timestamp('1').internal)
info = broker.get_info() info = broker.get_info()
self.assertEqual(info['account'], 'test1') self.assertEqual(info['account'], 'test1')
self.assertEqual(info['hash'], '00000000000000000000000000000000') self.assertEqual(info['hash'], '00000000000000000000000000000000')
self.assertEqual(info['put_timestamp'], normalize_timestamp(1)) self.assertEqual(info['put_timestamp'], Timestamp(1).internal)
self.assertEqual(info['delete_timestamp'], '0') self.assertEqual(info['delete_timestamp'], '0')
if self.__class__ == TestAccountBrokerBeforeMetadata: if self.__class__ == TestAccountBrokerBeforeMetadata:
self.assertEqual(info['status_changed_at'], '0') self.assertEqual(info['status_changed_at'], '0')
else: else:
self.assertEqual(info['status_changed_at'], normalize_timestamp(1)) self.assertEqual(info['status_changed_at'], Timestamp(1).internal)
info = broker.get_info() info = broker.get_info()
self.assertEqual(info['container_count'], 0) self.assertEqual(info['container_count'], 0)
broker.put_container('c1', normalize_timestamp(time()), 0, 0, 0, broker.put_container('c1', Timestamp(time()).internal, 0, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
info = broker.get_info() info = broker.get_info()
self.assertEqual(info['container_count'], 1) self.assertEqual(info['container_count'], 1)
sleep(.00001) sleep(.00001)
broker.put_container('c2', normalize_timestamp(time()), 0, 0, 0, broker.put_container('c2', Timestamp(time()).internal, 0, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
info = broker.get_info() info = broker.get_info()
self.assertEqual(info['container_count'], 2) self.assertEqual(info['container_count'], 2)
sleep(.00001) sleep(.00001)
broker.put_container('c2', normalize_timestamp(time()), 0, 0, 0, broker.put_container('c2', Timestamp(time()).internal, 0, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
info = broker.get_info() info = broker.get_info()
self.assertEqual(info['container_count'], 2) self.assertEqual(info['container_count'], 2)
sleep(.00001) sleep(.00001)
broker.put_container('c1', 0, normalize_timestamp(time()), 0, 0, broker.put_container('c1', 0, Timestamp(time()).internal, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
info = broker.get_info() info = broker.get_info()
self.assertEqual(info['container_count'], 1) self.assertEqual(info['container_count'], 1)
sleep(.00001) sleep(.00001)
broker.put_container('c2', 0, normalize_timestamp(time()), 0, 0, broker.put_container('c2', 0, Timestamp(time()).internal, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
info = broker.get_info() info = broker.get_info()
self.assertEqual(info['container_count'], 0) self.assertEqual(info['container_count'], 0)
@ -367,20 +367,20 @@ class TestAccountBroker(unittest.TestCase):
def test_list_containers_iter(self): def test_list_containers_iter(self):
# Test AccountBroker.list_containers_iter # Test AccountBroker.list_containers_iter
broker = AccountBroker(':memory:', account='a') broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1')) broker.initialize(Timestamp('1').internal)
for cont1 in xrange(4): for cont1 in xrange(4):
for cont2 in xrange(125): for cont2 in xrange(125):
broker.put_container('%d-%04d' % (cont1, cont2), broker.put_container('%d-%04d' % (cont1, cont2),
normalize_timestamp(time()), 0, 0, 0, Timestamp(time()).internal, 0, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
for cont in xrange(125): for cont in xrange(125):
broker.put_container('2-0051-%04d' % cont, broker.put_container('2-0051-%04d' % cont,
normalize_timestamp(time()), 0, 0, 0, Timestamp(time()).internal, 0, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
for cont in xrange(125): for cont in xrange(125):
broker.put_container('3-%04d-0049' % cont, broker.put_container('3-%04d-0049' % cont,
normalize_timestamp(time()), 0, 0, 0, Timestamp(time()).internal, 0, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
listing = broker.list_containers_iter(100, '', None, None, '') listing = broker.list_containers_iter(100, '', None, None, '')
@ -445,7 +445,7 @@ class TestAccountBroker(unittest.TestCase):
'3-0047-', '3-0048', '3-0048-', '3-0049', '3-0047-', '3-0048', '3-0048-', '3-0049',
'3-0049-', '3-0050']) '3-0049-', '3-0050'])
broker.put_container('3-0049-', normalize_timestamp(time()), 0, 0, 0, broker.put_container('3-0049-', Timestamp(time()).internal, 0, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
listing = broker.list_containers_iter(10, '3-0048', None, None, None) listing = broker.list_containers_iter(10, '3-0048', None, None, None)
self.assertEqual(len(listing), 10) self.assertEqual(len(listing), 10)
@ -470,26 +470,26 @@ class TestAccountBroker(unittest.TestCase):
# Test AccountBroker.list_containers_iter for an # Test AccountBroker.list_containers_iter for an
# account that has an odd container with a trailing delimiter # account that has an odd container with a trailing delimiter
broker = AccountBroker(':memory:', account='a') broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1')) broker.initialize(Timestamp('1').internal)
broker.put_container('a', normalize_timestamp(time()), 0, 0, 0, broker.put_container('a', Timestamp(time()).internal, 0, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
broker.put_container('a-', normalize_timestamp(time()), 0, 0, 0, broker.put_container('a-', Timestamp(time()).internal, 0, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
broker.put_container('a-a', normalize_timestamp(time()), 0, 0, 0, broker.put_container('a-a', Timestamp(time()).internal, 0, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
broker.put_container('a-a-a', normalize_timestamp(time()), 0, 0, 0, broker.put_container('a-a-a', Timestamp(time()).internal, 0, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
broker.put_container('a-a-b', normalize_timestamp(time()), 0, 0, 0, broker.put_container('a-a-b', Timestamp(time()).internal, 0, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
broker.put_container('a-b', normalize_timestamp(time()), 0, 0, 0, broker.put_container('a-b', Timestamp(time()).internal, 0, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
broker.put_container('b', normalize_timestamp(time()), 0, 0, 0, broker.put_container('b', Timestamp(time()).internal, 0, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
broker.put_container('b-a', normalize_timestamp(time()), 0, 0, 0, broker.put_container('b-a', Timestamp(time()).internal, 0, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
broker.put_container('b-b', normalize_timestamp(time()), 0, 0, 0, broker.put_container('b-b', Timestamp(time()).internal, 0, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
broker.put_container('c', normalize_timestamp(time()), 0, 0, 0, broker.put_container('c', Timestamp(time()).internal, 0, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
listing = broker.list_containers_iter(15, None, None, None, None) listing = broker.list_containers_iter(15, None, None, None, None)
self.assertEqual(len(listing), 10) self.assertEqual(len(listing), 10)
@ -510,27 +510,30 @@ class TestAccountBroker(unittest.TestCase):
def test_chexor(self): def test_chexor(self):
broker = AccountBroker(':memory:', account='a') broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1')) broker.initialize(Timestamp('1').internal)
broker.put_container('a', normalize_timestamp(1), broker.put_container('a', Timestamp(1).internal,
normalize_timestamp(0), 0, 0, Timestamp(0).internal, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
broker.put_container('b', normalize_timestamp(2), broker.put_container('b', Timestamp(2).internal,
normalize_timestamp(0), 0, 0, Timestamp(0).internal, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
hasha = hashlib.md5( hasha = hashlib.md5(
'%s-%s' % ('a', '0000000001.00000-0000000000.00000-0-0') '%s-%s' % ('a', "%s-%s-%s-%s" % (
Timestamp(1).internal, Timestamp(0).internal, 0, 0))
).digest() ).digest()
hashb = hashlib.md5( hashb = hashlib.md5(
'%s-%s' % ('b', '0000000002.00000-0000000000.00000-0-0') '%s-%s' % ('b', "%s-%s-%s-%s" % (
Timestamp(2).internal, Timestamp(0).internal, 0, 0))
).digest() ).digest()
hashc = \ hashc = \
''.join(('%02x' % (ord(a) ^ ord(b)) for a, b in zip(hasha, hashb))) ''.join(('%02x' % (ord(a) ^ ord(b)) for a, b in zip(hasha, hashb)))
self.assertEqual(broker.get_info()['hash'], hashc) self.assertEqual(broker.get_info()['hash'], hashc)
broker.put_container('b', normalize_timestamp(3), broker.put_container('b', Timestamp(3).internal,
normalize_timestamp(0), 0, 0, Timestamp(0).internal, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
hashb = hashlib.md5( hashb = hashlib.md5(
'%s-%s' % ('b', '0000000003.00000-0000000000.00000-0-0') '%s-%s' % ('b', "%s-%s-%s-%s" % (
Timestamp(3).internal, Timestamp(0).internal, 0, 0))
).digest() ).digest()
hashc = \ hashc = \
''.join(('%02x' % (ord(a) ^ ord(b)) for a, b in zip(hasha, hashb))) ''.join(('%02x' % (ord(a) ^ ord(b)) for a, b in zip(hasha, hashb)))
@ -538,12 +541,12 @@ class TestAccountBroker(unittest.TestCase):
def test_merge_items(self): def test_merge_items(self):
broker1 = AccountBroker(':memory:', account='a') broker1 = AccountBroker(':memory:', account='a')
broker1.initialize(normalize_timestamp('1')) broker1.initialize(Timestamp('1').internal)
broker2 = AccountBroker(':memory:', account='a') broker2 = AccountBroker(':memory:', account='a')
broker2.initialize(normalize_timestamp('1')) broker2.initialize(Timestamp('1').internal)
broker1.put_container('a', normalize_timestamp(1), 0, 0, 0, broker1.put_container('a', Timestamp(1).internal, 0, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
broker1.put_container('b', normalize_timestamp(2), 0, 0, 0, broker1.put_container('b', Timestamp(2).internal, 0, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
id = broker1.get_info()['id'] id = broker1.get_info()['id']
broker2.merge_items(broker1.get_items_since( broker2.merge_items(broker1.get_items_since(
@ -551,7 +554,7 @@ class TestAccountBroker(unittest.TestCase):
items = broker2.get_items_since(-1, 1000) items = broker2.get_items_since(-1, 1000)
self.assertEqual(len(items), 2) self.assertEqual(len(items), 2)
self.assertEqual(['a', 'b'], sorted([rec['name'] for rec in items])) self.assertEqual(['a', 'b'], sorted([rec['name'] for rec in items]))
broker1.put_container('c', normalize_timestamp(3), 0, 0, 0, broker1.put_container('c', Timestamp(3).internal, 0, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
broker2.merge_items(broker1.get_items_since( broker2.merge_items(broker1.get_items_since(
broker2.get_sync(id), 1000), id) broker2.get_sync(id), 1000), id)
@ -567,14 +570,14 @@ class TestAccountBroker(unittest.TestCase):
broker_path = os.path.join(tempdir, 'test-load-old.db') broker_path = os.path.join(tempdir, 'test-load-old.db')
try: try:
broker = AccountBroker(broker_path, account='real') broker = AccountBroker(broker_path, account='real')
broker.initialize(normalize_timestamp(1)) broker.initialize(Timestamp(1).internal)
with open(broker_path + '.pending', 'a+b') as pending: with open(broker_path + '.pending', 'a+b') as pending:
pending.write(':') pending.write(':')
pending.write(pickle.dumps( pending.write(pickle.dumps(
# name, put_timestamp, delete_timestamp, object_count, # name, put_timestamp, delete_timestamp, object_count,
# bytes_used, deleted # bytes_used, deleted
('oldcon', normalize_timestamp(200), ('oldcon', Timestamp(200).internal,
normalize_timestamp(0), Timestamp(0).internal,
896, 9216695, 0)).encode('base64')) 896, 9216695, 0)).encode('base64'))
broker._commit_puts() broker._commit_puts()
@ -593,9 +596,9 @@ class TestAccountBroker(unittest.TestCase):
StoragePolicy(2, 'two', False), StoragePolicy(2, 'two', False),
StoragePolicy(3, 'three', False)]) StoragePolicy(3, 'three', False)])
def test_get_policy_stats(self): def test_get_policy_stats(self):
ts = itertools.count() ts = (Timestamp(t).internal for t in itertools.count(int(time())))
broker = AccountBroker(':memory:', account='a') broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp(ts.next())) broker.initialize(ts.next())
# check empty policy_stats # check empty policy_stats
self.assertTrue(broker.empty()) self.assertTrue(broker.empty())
policy_stats = broker.get_policy_stats() policy_stats = broker.get_policy_stats()
@ -604,7 +607,7 @@ class TestAccountBroker(unittest.TestCase):
# add some empty containers # add some empty containers
for policy in POLICIES: for policy in POLICIES:
container_name = 'c-%s' % policy.name container_name = 'c-%s' % policy.name
put_timestamp = normalize_timestamp(ts.next()) put_timestamp = ts.next()
broker.put_container(container_name, broker.put_container(container_name,
put_timestamp, 0, put_timestamp, 0,
0, 0, 0, 0,
@ -618,7 +621,7 @@ class TestAccountBroker(unittest.TestCase):
# update the containers object & byte count # update the containers object & byte count
for policy in POLICIES: for policy in POLICIES:
container_name = 'c-%s' % policy.name container_name = 'c-%s' % policy.name
put_timestamp = normalize_timestamp(ts.next()) put_timestamp = ts.next()
count = policy.idx * 100 # good as any integer count = policy.idx * 100 # good as any integer
broker.put_container(container_name, broker.put_container(container_name,
put_timestamp, 0, put_timestamp, 0,
@ -640,7 +643,7 @@ class TestAccountBroker(unittest.TestCase):
# now delete the containers one by one # now delete the containers one by one
for policy in POLICIES: for policy in POLICIES:
container_name = 'c-%s' % policy.name container_name = 'c-%s' % policy.name
delete_timestamp = normalize_timestamp(ts.next()) delete_timestamp = ts.next()
broker.put_container(container_name, broker.put_container(container_name,
0, delete_timestamp, 0, delete_timestamp,
0, 0, 0, 0,
@ -654,9 +657,9 @@ class TestAccountBroker(unittest.TestCase):
@patch_policies([StoragePolicy(0, 'zero', False), @patch_policies([StoragePolicy(0, 'zero', False),
StoragePolicy(1, 'one', True)]) StoragePolicy(1, 'one', True)])
def test_policy_stats_tracking(self): def test_policy_stats_tracking(self):
ts = itertools.count() ts = (Timestamp(t).internal for t in itertools.count(int(time())))
broker = AccountBroker(':memory:', account='a') broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp(ts.next())) broker.initialize(ts.next())
# policy 0 # policy 0
broker.put_container('con1', ts.next(), 0, 12, 2798641, 0) broker.put_container('con1', ts.next(), 0, 12, 2798641, 0)
@ -731,7 +734,7 @@ def premetadata_create_account_stat_table(self, conn, put_timestamp):
conn.execute(''' conn.execute('''
UPDATE account_stat SET account = ?, created_at = ?, id = ?, UPDATE account_stat SET account = ?, created_at = ?, id = ?,
put_timestamp = ? put_timestamp = ?
''', (self.account, normalize_timestamp(time()), str(uuid4()), ''', (self.account, Timestamp(time()).internal, str(uuid4()),
put_timestamp)) put_timestamp))
@ -764,7 +767,7 @@ class TestAccountBrokerBeforeMetadata(TestAccountBroker):
AccountBroker.create_account_stat_table = \ AccountBroker.create_account_stat_table = \
premetadata_create_account_stat_table premetadata_create_account_stat_table
broker = AccountBroker(':memory:', account='a') broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1')) broker.initialize(Timestamp('1').internal)
exc = None exc = None
with broker.get() as conn: with broker.get() as conn:
try: try:
@ -777,7 +780,7 @@ class TestAccountBrokerBeforeMetadata(TestAccountBroker):
AccountBroker.create_account_stat_table = \ AccountBroker.create_account_stat_table = \
self._imported_create_account_stat_table self._imported_create_account_stat_table
broker = AccountBroker(':memory:', account='a') broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1')) broker.initialize(Timestamp('1').internal)
with broker.get() as conn: with broker.get() as conn:
conn.execute('SELECT metadata FROM account_stat') conn.execute('SELECT metadata FROM account_stat')
@ -851,7 +854,7 @@ class TestAccountBrokerBeforeSPI(TestAccountBroker):
self._imported_initialize = AccountBroker._initialize self._imported_initialize = AccountBroker._initialize
AccountBroker._initialize = prespi_AccountBroker_initialize AccountBroker._initialize = prespi_AccountBroker_initialize
broker = AccountBroker(':memory:', account='a') broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1')) broker.initialize(Timestamp('1').internal)
exc = None exc = None
with broker.get() as conn: with broker.get() as conn:
try: try:
@ -872,7 +875,7 @@ class TestAccountBrokerBeforeSPI(TestAccountBroker):
self._imported_create_container_table self._imported_create_container_table
AccountBroker._initialize = self._imported_initialize AccountBroker._initialize = self._imported_initialize
broker = AccountBroker(':memory:', account='a') broker = AccountBroker(':memory:', account='a')
broker.initialize(normalize_timestamp('1')) broker.initialize(Timestamp('1').internal)
with broker.get() as conn: with broker.get() as conn:
conn.execute('SELECT storage_policy_index FROM container') conn.execute('SELECT storage_policy_index FROM container')
@ -882,7 +885,7 @@ class TestAccountBrokerBeforeSPI(TestAccountBroker):
# first init an acct DB without the policy_stat table present # first init an acct DB without the policy_stat table present
broker = AccountBroker(db_path, account='a') broker = AccountBroker(db_path, account='a')
broker.initialize(normalize_timestamp('1')) broker.initialize(Timestamp('1').internal)
with broker.get() as conn: with broker.get() as conn:
try: try:
conn.execute(''' conn.execute('''
@ -900,7 +903,7 @@ class TestAccountBrokerBeforeSPI(TestAccountBroker):
self.assertEqual(len(stats), 0) self.assertEqual(len(stats), 0)
# now do a PUT to create the table # now do a PUT to create the table
broker.put_container('o', normalize_timestamp(time()), 0, 0, 0, broker.put_container('o', Timestamp(time()).internal, 0, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
broker._commit_puts_stale_ok() broker._commit_puts_stale_ok()
@ -918,7 +921,7 @@ class TestAccountBrokerBeforeSPI(TestAccountBroker):
# first init an acct DB without the policy_stat table present # first init an acct DB without the policy_stat table present
broker = AccountBroker(db_path, account='a') broker = AccountBroker(db_path, account='a')
broker.initialize(normalize_timestamp('1')) broker.initialize(Timestamp('1').internal)
with broker.get() as conn: with broker.get() as conn:
try: try:
conn.execute(''' conn.execute('''
@ -940,7 +943,7 @@ class TestAccountBrokerBeforeSPI(TestAccountBroker):
delete_timestamp, object_count, bytes_used, delete_timestamp, object_count, bytes_used,
deleted) deleted)
VALUES (?, ?, ?, ?, ?, ?) VALUES (?, ?, ?, ?, ?, ?)
''', ('test_name', normalize_timestamp(time()), 0, 1, 2, 0)) ''', ('test_name', Timestamp(time()).internal, 0, 1, 2, 0))
conn.commit() conn.commit()
# make sure we can iter containers without the migration # make sure we can iter containers without the migration
@ -955,7 +958,7 @@ class TestAccountBrokerBeforeSPI(TestAccountBroker):
# which will update the DB schema as well as update policy_stats # which will update the DB schema as well as update policy_stats
# for legacy containers in the DB (those without an SPI) # for legacy containers in the DB (those without an SPI)
other_policy = [p for p in POLICIES if p.idx != 0][0] other_policy = [p for p in POLICIES if p.idx != 0][0]
broker.put_container('test_second', normalize_timestamp(time()), broker.put_container('test_second', Timestamp(time()).internal,
0, 3, 4, other_policy.idx) 0, 3, 4, other_policy.idx)
broker._commit_puts_stale_ok() broker._commit_puts_stale_ok()
@ -994,14 +997,15 @@ class TestAccountBrokerBeforeSPI(TestAccountBroker):
def test_half_upgraded_database(self, tempdir): def test_half_upgraded_database(self, tempdir):
db_path = os.path.join(tempdir, 'account.db') db_path = os.path.join(tempdir, 'account.db')
ts = itertools.count() ts = itertools.count()
ts = (Timestamp(t).internal for t in itertools.count(int(time())))
broker = AccountBroker(db_path, account='a') broker = AccountBroker(db_path, account='a')
broker.initialize(normalize_timestamp(ts.next())) broker.initialize(ts.next())
self.assertTrue(broker.empty()) self.assertTrue(broker.empty())
# add a container (to pending file) # add a container (to pending file)
broker.put_container('c', normalize_timestamp(ts.next()), 0, 0, 0, broker.put_container('c', ts.next(), 0, 0, 0,
POLICIES.default.idx) POLICIES.default.idx)
real_get = broker.get real_get = broker.get

View File

@ -0,0 +1,128 @@
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools
import time
import unittest
import mock
from swift.account import utils, backend
from swift.common.storage_policy import POLICIES
from swift.common.utils import Timestamp
from swift.common.swob import HeaderKeyDict
from test.unit import patch_policies
class TestFakeAccountBroker(unittest.TestCase):
def test_fake_broker_get_info(self):
broker = utils.FakeAccountBroker()
now = time.time()
with mock.patch('time.time', new=lambda: now):
info = broker.get_info()
timestamp = Timestamp(now)
expected = {
'container_count': 0,
'object_count': 0,
'bytes_used': 0,
'created_at': timestamp.internal,
'put_timestamp': timestamp.internal,
}
self.assertEqual(info, expected)
def test_fake_broker_list_containers_iter(self):
broker = utils.FakeAccountBroker()
self.assertEqual(broker.list_containers_iter(), [])
def test_fake_broker_metadata(self):
broker = utils.FakeAccountBroker()
self.assertEqual(broker.metadata, {})
def test_fake_broker_get_policy_stats(self):
broker = utils.FakeAccountBroker()
self.assertEqual(broker.get_policy_stats(), {})
class TestAccountUtils(unittest.TestCase):
def test_get_response_headers_fake_broker(self):
broker = utils.FakeAccountBroker()
now = time.time()
expected = {
'X-Account-Container-Count': 0,
'X-Account-Object-Count': 0,
'X-Account-Bytes-Used': 0,
'X-Timestamp': Timestamp(now).normal,
'X-PUT-Timestamp': Timestamp(now).normal,
}
with mock.patch('time.time', new=lambda: now):
resp_headers = utils.get_response_headers(broker)
self.assertEqual(resp_headers, expected)
def test_get_response_headers_empty_memory_broker(self):
broker = backend.AccountBroker(':memory:', account='a')
now = time.time()
with mock.patch('time.time', new=lambda: now):
broker.initialize(Timestamp(now).internal)
expected = {
'X-Account-Container-Count': 0,
'X-Account-Object-Count': 0,
'X-Account-Bytes-Used': 0,
'X-Timestamp': Timestamp(now).normal,
'X-PUT-Timestamp': Timestamp(now).normal,
}
resp_headers = utils.get_response_headers(broker)
self.assertEqual(resp_headers, expected)
@patch_policies
def test_get_response_headers_with_data(self):
broker = backend.AccountBroker(':memory:', account='a')
now = time.time()
with mock.patch('time.time', new=lambda: now):
broker.initialize(Timestamp(now).internal)
# add some container data
ts = (Timestamp(t).internal for t in itertools.count(int(now)))
total_containers = 0
total_objects = 0
total_bytes = 0
for policy in POLICIES:
delete_timestamp = ts.next()
put_timestamp = ts.next()
object_count = int(policy)
bytes_used = int(policy) * 10
broker.put_container('c-%s' % policy.name, put_timestamp,
delete_timestamp, object_count, bytes_used,
int(policy))
total_containers += 1
total_objects += object_count
total_bytes += bytes_used
expected = HeaderKeyDict({
'X-Account-Container-Count': total_containers,
'X-Account-Object-Count': total_objects,
'X-Account-Bytes-Used': total_bytes,
'X-Timestamp': Timestamp(now).normal,
'X-PUT-Timestamp': Timestamp(now).normal,
})
for policy in POLICIES:
prefix = 'X-Account-Storage-Policy-%s-' % policy.name
expected[prefix + 'Object-Count'] = int(policy)
expected[prefix + 'Bytes-Used'] = int(policy) * 10
resp_headers = utils.get_response_headers(broker)
for key, value in resp_headers.items():
expected_value = expected.pop(key)
self.assertEqual(expected_value, str(value),
'value for %r was %r not %r' % (
key, value, expected_value))
self.assertFalse(expected)

View File

@ -115,10 +115,10 @@ class TestCliInfo(TestCliInfoBase):
Account: acct Account: acct
Account Hash: dc5be2aa4347a22a0fee6bc7de505b47 Account Hash: dc5be2aa4347a22a0fee6bc7de505b47
Metadata: Metadata:
Created at: 1970-01-01 00:01:40.100000 (100.1) Created at: 1970-01-01T00:01:40.100000 (100.1)
Put Timestamp: 1970-01-01 00:01:46.300000 (106.3) Put Timestamp: 1970-01-01T00:01:46.300000 (106.3)
Delete Timestamp: 1970-01-01 00:01:47.900000 (107.9) Delete Timestamp: 1970-01-01T00:01:47.900000 (107.9)
Status Timestamp: 1970-01-01 00:01:48.300000 (108.3) Status Timestamp: 1970-01-01T00:01:48.300000 (108.3)
Container Count: 3 Container Count: 3
Object Count: 20 Object Count: 20
Bytes Used: 42 Bytes Used: 42
@ -158,15 +158,15 @@ No system metadata found in db file
Container: cont Container: cont
Container Hash: d49d0ecbb53be1fcc49624f2f7c7ccae Container Hash: d49d0ecbb53be1fcc49624f2f7c7ccae
Metadata: Metadata:
Created at: 1970-01-01 00:01:40.100000 (0000000100.10000) Created at: 1970-01-01T00:01:40.100000 (0000000100.10000)
Put Timestamp: 1970-01-01 00:01:46.300000 (0000000106.30000) Put Timestamp: 1970-01-01T00:01:46.300000 (0000000106.30000)
Delete Timestamp: 1970-01-01 00:01:47.900000 (0000000107.90000) Delete Timestamp: 1970-01-01T00:01:47.900000 (0000000107.90000)
Status Timestamp: 1970-01-01 00:01:48.300000 (0000000108.30000) Status Timestamp: 1970-01-01T00:01:48.300000 (0000000108.30000)
Object Count: 20 Object Count: 20
Bytes Used: 42 Bytes Used: 42
Storage Policy: %s (0) Storage Policy: %s (0)
Reported Put Timestamp: 1970-01-01 02:48:26.300000 (0000010106.30000) Reported Put Timestamp: 1970-01-01T02:48:26.300000 (0000010106.30000)
Reported Delete Timestamp: 1970-01-01 02:48:27.900000 (0000010107.90000) Reported Delete Timestamp: 1970-01-01T02:48:27.900000 (0000010107.90000)
Reported Object Count: 20 Reported Object Count: 20
Reported Bytes Used: 42 Reported Bytes Used: 42
Chexor: abaddeadbeefcafe Chexor: abaddeadbeefcafe
@ -452,8 +452,9 @@ class TestPrintObjFullMeta(TestCliInfoBase):
Object: dummy Object: dummy
Object hash: 128fdf98bddd1b1e8695f4340e67a67a Object hash: 128fdf98bddd1b1e8695f4340e67a67a
Content-Type: application/octet-stream Content-Type: application/octet-stream
Timestamp: 1970-01-01 00:01:46.300000 (106.3) Timestamp: 1970-01-01T00:01:46.300000 (%s)
User Metadata: {'X-Object-Meta-Mtime': '107.3'}''' User Metadata: {'X-Object-Meta-Mtime': '107.3'}''' % (
utils.Timestamp(106.3).internal)
self.assertEquals(out.getvalue().strip(), exp_out) self.assertEquals(out.getvalue().strip(), exp_out)
@ -469,8 +470,9 @@ User Metadata: {'X-Object-Meta-Mtime': '107.3'}'''
print_obj_metadata(metadata) print_obj_metadata(metadata)
exp_out = '''Path: Not found in metadata exp_out = '''Path: Not found in metadata
Content-Type: application/octet-stream Content-Type: application/octet-stream
Timestamp: 1970-01-01 00:01:46.300000 (106.3) Timestamp: 1970-01-01T00:01:46.300000 (%s)
User Metadata: {'X-Object-Meta-Mtime': '107.3'}''' User Metadata: {'X-Object-Meta-Mtime': '107.3'}''' % (
utils.Timestamp(106.3).internal)
self.assertEquals(out.getvalue().strip(), exp_out) self.assertEquals(out.getvalue().strip(), exp_out)
@ -485,8 +487,9 @@ User Metadata: {'X-Object-Meta-Mtime': '107.3'}'''
Object: dummy Object: dummy
Object hash: 128fdf98bddd1b1e8695f4340e67a67a Object hash: 128fdf98bddd1b1e8695f4340e67a67a
Content-Type: Not found in metadata Content-Type: Not found in metadata
Timestamp: 1970-01-01 00:01:46.300000 (106.3) Timestamp: 1970-01-01T00:01:46.300000 (%s)
User Metadata: {'X-Object-Meta-Mtime': '107.3'}''' User Metadata: {'X-Object-Meta-Mtime': '107.3'}''' % (
utils.Timestamp(106.3).internal)
self.assertEquals(out.getvalue().strip(), exp_out) self.assertEquals(out.getvalue().strip(), exp_out)

View File

@ -16,6 +16,7 @@
import unittest import unittest
import mock import mock
import tempfile import tempfile
import time
from test import safe_repr from test import safe_repr
from test.unit import MockTrue from test.unit import MockTrue
@ -182,6 +183,20 @@ class TestConstraints(unittest.TestCase):
self.assertFalse(constraints.check_float('')) self.assertFalse(constraints.check_float(''))
self.assertTrue(constraints.check_float('0')) self.assertTrue(constraints.check_float('0'))
def test_valid_timestamp(self):
self.assertRaises(HTTPException,
constraints.valid_timestamp,
Request.blank('/'))
self.assertRaises(HTTPException,
constraints.valid_timestamp,
Request.blank('/', headers={
'X-Timestamp': 'asdf'}))
timestamp = utils.Timestamp(time.time())
req = Request.blank('/', headers={'X-Timestamp': timestamp.internal})
self.assertEqual(timestamp, constraints.valid_timestamp(req))
req = Request.blank('/', headers={'X-Timestamp': timestamp.normal})
self.assertEqual(timestamp, constraints.valid_timestamp(req))
def test_check_utf8(self): def test_check_utf8(self):
unicode_sample = u'\uc77c\uc601' unicode_sample = u'\uc77c\uc601'
valid_utf8_str = unicode_sample.encode('utf-8') valid_utf8_str = unicode_sample.encode('utf-8')

View File

@ -35,7 +35,7 @@ import swift.common.db
from swift.common.db import chexor, dict_factory, get_db_connection, \ from swift.common.db import chexor, dict_factory, get_db_connection, \
DatabaseBroker, DatabaseConnectionError, DatabaseAlreadyExists, \ DatabaseBroker, DatabaseConnectionError, DatabaseAlreadyExists, \
GreenDBConnection, PICKLE_PROTOCOL GreenDBConnection, PICKLE_PROTOCOL
from swift.common.utils import normalize_timestamp, mkdirs, json from swift.common.utils import normalize_timestamp, mkdirs, json, Timestamp
from swift.common.exceptions import LockTimeout from swift.common.exceptions import LockTimeout
from test.unit import with_tempdir from test.unit import with_tempdir
@ -217,7 +217,7 @@ class ExampleBroker(DatabaseBroker):
INSERT INTO test_stat ( INSERT INTO test_stat (
created_at, put_timestamp, status_changed_at) created_at, put_timestamp, status_changed_at)
VALUES (?, ?, ?); VALUES (?, ?, ?);
""", (normalize_timestamp(time.time()), put_timestamp, """, (Timestamp(time.time()).internal, put_timestamp,
put_timestamp)) put_timestamp))
def merge_items(self, item_list): def merge_items(self, item_list):
@ -268,8 +268,8 @@ class ExampleBroker(DatabaseBroker):
def _is_deleted(self, conn): def _is_deleted(self, conn):
info = conn.execute('SELECT * FROM test_stat').fetchone() info = conn.execute('SELECT * FROM test_stat').fetchone()
return (info['test_count'] in (None, '', 0, '0')) and \ return (info['test_count'] in (None, '', 0, '0')) and \
(normalize_timestamp(info['delete_timestamp']) > (Timestamp(info['delete_timestamp']) >
normalize_timestamp(info['put_timestamp'])) Timestamp(info['put_timestamp']))
class TestExampleBroker(unittest.TestCase): class TestExampleBroker(unittest.TestCase):
@ -282,7 +282,7 @@ class TestExampleBroker(unittest.TestCase):
policy = 0 policy = 0
def test_merge_timestamps_simple_delete(self): def test_merge_timestamps_simple_delete(self):
ts = (normalize_timestamp(t) for t in ts = (Timestamp(t).internal for t in
itertools.count(int(time.time()))) itertools.count(int(time.time())))
put_timestamp = ts.next() put_timestamp = ts.next()
broker = self.broker_class(':memory:', account='a', container='c') broker = self.broker_class(':memory:', account='a', container='c')
@ -302,8 +302,7 @@ class TestExampleBroker(unittest.TestCase):
self.assertEqual(info['created_at'], created_at) self.assertEqual(info['created_at'], created_at)
self.assertEqual(info['put_timestamp'], put_timestamp) self.assertEqual(info['put_timestamp'], put_timestamp)
self.assertEqual(info['delete_timestamp'], delete_timestamp) self.assertEqual(info['delete_timestamp'], delete_timestamp)
self.assert_(info['status_changed_at'] > self.assert_(info['status_changed_at'] > Timestamp(put_timestamp))
normalize_timestamp(put_timestamp))
def put_item(self, broker, timestamp): def put_item(self, broker, timestamp):
broker.put_test('test', timestamp) broker.put_test('test', timestamp)
@ -312,7 +311,7 @@ class TestExampleBroker(unittest.TestCase):
broker.delete_test('test', timestamp) broker.delete_test('test', timestamp)
def test_merge_timestamps_delete_with_objects(self): def test_merge_timestamps_delete_with_objects(self):
ts = (normalize_timestamp(t) for t in ts = (Timestamp(t).internal for t in
itertools.count(int(time.time()))) itertools.count(int(time.time())))
put_timestamp = ts.next() put_timestamp = ts.next()
broker = self.broker_class(':memory:', account='a', container='c') broker = self.broker_class(':memory:', account='a', container='c')
@ -345,7 +344,7 @@ class TestExampleBroker(unittest.TestCase):
self.assert_(broker.is_deleted()) self.assert_(broker.is_deleted())
def test_merge_timestamps_simple_recreate(self): def test_merge_timestamps_simple_recreate(self):
ts = (normalize_timestamp(t) for t in ts = (Timestamp(t).internal for t in
itertools.count(int(time.time()))) itertools.count(int(time.time())))
put_timestamp = ts.next() put_timestamp = ts.next()
broker = self.broker_class(':memory:', account='a', container='c') broker = self.broker_class(':memory:', account='a', container='c')
@ -361,7 +360,7 @@ class TestExampleBroker(unittest.TestCase):
self.assertEqual(info['delete_timestamp'], delete_timestamp) self.assertEqual(info['delete_timestamp'], delete_timestamp)
orig_status_changed_at = info['status_changed_at'] orig_status_changed_at = info['status_changed_at']
self.assert_(orig_status_changed_at > self.assert_(orig_status_changed_at >
normalize_timestamp(virgin_status_changed_at)) Timestamp(virgin_status_changed_at))
# recreate # recreate
recreate_timestamp = ts.next() recreate_timestamp = ts.next()
status_changed_at = time.time() status_changed_at = time.time()
@ -375,7 +374,7 @@ class TestExampleBroker(unittest.TestCase):
self.assert_(info['status_changed_at'], status_changed_at) self.assert_(info['status_changed_at'], status_changed_at)
def test_merge_timestamps_recreate_with_objects(self): def test_merge_timestamps_recreate_with_objects(self):
ts = (normalize_timestamp(t) for t in ts = (Timestamp(t).internal for t in
itertools.count(int(time.time()))) itertools.count(int(time.time())))
put_timestamp = ts.next() put_timestamp = ts.next()
broker = self.broker_class(':memory:', account='a', container='c') broker = self.broker_class(':memory:', account='a', container='c')
@ -390,8 +389,8 @@ class TestExampleBroker(unittest.TestCase):
self.assertEqual(info['put_timestamp'], put_timestamp) self.assertEqual(info['put_timestamp'], put_timestamp)
self.assertEqual(info['delete_timestamp'], delete_timestamp) self.assertEqual(info['delete_timestamp'], delete_timestamp)
orig_status_changed_at = info['status_changed_at'] orig_status_changed_at = info['status_changed_at']
self.assert_(normalize_timestamp(orig_status_changed_at) >= self.assert_(Timestamp(orig_status_changed_at) >=
normalize_timestamp(put_timestamp)) Timestamp(put_timestamp))
# add object # add object
self.put_item(broker, ts.next()) self.put_item(broker, ts.next())
count_key = '%s_count' % broker.db_contains_type count_key = '%s_count' % broker.db_contains_type
@ -411,7 +410,7 @@ class TestExampleBroker(unittest.TestCase):
self.assertFalse(broker.is_deleted()) self.assertFalse(broker.is_deleted())
def test_merge_timestamps_update_put_no_status_change(self): def test_merge_timestamps_update_put_no_status_change(self):
ts = (normalize_timestamp(t) for t in ts = (Timestamp(t).internal for t in
itertools.count(int(time.time()))) itertools.count(int(time.time())))
put_timestamp = ts.next() put_timestamp = ts.next()
broker = self.broker_class(':memory:', account='a', container='c') broker = self.broker_class(':memory:', account='a', container='c')
@ -426,7 +425,7 @@ class TestExampleBroker(unittest.TestCase):
self.assertEqual(orig_status_changed_at, info['status_changed_at']) self.assertEqual(orig_status_changed_at, info['status_changed_at'])
def test_merge_timestamps_update_delete_no_status_change(self): def test_merge_timestamps_update_delete_no_status_change(self):
ts = (normalize_timestamp(t) for t in ts = (Timestamp(t).internal for t in
itertools.count(int(time.time()))) itertools.count(int(time.time())))
put_timestamp = ts.next() put_timestamp = ts.next()
broker = self.broker_class(':memory:', account='a', container='c') broker = self.broker_class(':memory:', account='a', container='c')
@ -458,15 +457,15 @@ class TestExampleBroker(unittest.TestCase):
broker = self.broker_class(':memory:', account='test', container='c') broker = self.broker_class(':memory:', account='test', container='c')
created_at = time.time() created_at = time.time()
with patch('swift.common.db.time.time', new=lambda: created_at): with patch('swift.common.db.time.time', new=lambda: created_at):
broker.initialize(normalize_timestamp(1), broker.initialize(Timestamp(1).internal,
storage_policy_index=int(self.policy)) storage_policy_index=int(self.policy))
info = broker.get_info() info = broker.get_info()
count_key = '%s_count' % broker.db_contains_type count_key = '%s_count' % broker.db_contains_type
expected = { expected = {
count_key: 0, count_key: 0,
'created_at': normalize_timestamp(created_at), 'created_at': Timestamp(created_at).internal,
'put_timestamp': normalize_timestamp(1), 'put_timestamp': Timestamp(1).internal,
'status_changed_at': normalize_timestamp(1), 'status_changed_at': Timestamp(1).internal,
'delete_timestamp': '0', 'delete_timestamp': '0',
} }
for k, v in expected.items(): for k, v in expected.items():
@ -476,14 +475,14 @@ class TestExampleBroker(unittest.TestCase):
def test_get_raw_metadata(self): def test_get_raw_metadata(self):
broker = self.broker_class(':memory:', account='test', container='c') broker = self.broker_class(':memory:', account='test', container='c')
broker.initialize(normalize_timestamp(0), broker.initialize(Timestamp(0).internal,
storage_policy_index=int(self.policy)) storage_policy_index=int(self.policy))
self.assertEqual(broker.metadata, {}) self.assertEqual(broker.metadata, {})
self.assertEqual(broker.get_raw_metadata(), '') self.assertEqual(broker.get_raw_metadata(), '')
key = u'test\u062a'.encode('utf-8') key = u'test\u062a'.encode('utf-8')
value = u'value\u062a' value = u'value\u062a'
metadata = { metadata = {
key: [value, normalize_timestamp(1)] key: [value, Timestamp(1).internal]
} }
broker.update_metadata(metadata) broker.update_metadata(metadata)
self.assertEqual(broker.metadata, metadata) self.assertEqual(broker.metadata, metadata)
@ -491,7 +490,7 @@ class TestExampleBroker(unittest.TestCase):
json.dumps(metadata)) json.dumps(metadata))
def test_put_timestamp(self): def test_put_timestamp(self):
ts = (normalize_timestamp(t) for t in ts = (Timestamp(t).internal for t in
itertools.count(int(time.time()))) itertools.count(int(time.time())))
broker = self.broker_class(':memory:', account='a', container='c') broker = self.broker_class(':memory:', account='a', container='c')
orig_put_timestamp = ts.next() orig_put_timestamp = ts.next()
@ -514,7 +513,7 @@ class TestExampleBroker(unittest.TestCase):
newer_put_timestamp) newer_put_timestamp)
def test_status_changed_at(self): def test_status_changed_at(self):
ts = (normalize_timestamp(t) for t in ts = (Timestamp(t).internal for t in
itertools.count(int(time.time()))) itertools.count(int(time.time())))
broker = self.broker_class(':memory:', account='test', container='c') broker = self.broker_class(':memory:', account='test', container='c')
put_timestamp = ts.next() put_timestamp = ts.next()
@ -525,7 +524,7 @@ class TestExampleBroker(unittest.TestCase):
self.assertEquals(broker.get_info()['status_changed_at'], self.assertEquals(broker.get_info()['status_changed_at'],
put_timestamp) put_timestamp)
self.assertEquals(broker.get_info()['created_at'], self.assertEquals(broker.get_info()['created_at'],
normalize_timestamp(created_at)) Timestamp(created_at).internal)
status_changed_at = ts.next() status_changed_at = ts.next()
broker.update_status_changed_at(status_changed_at) broker.update_status_changed_at(status_changed_at)
self.assertEqual(broker.get_info()['status_changed_at'], self.assertEqual(broker.get_info()['status_changed_at'],
@ -543,7 +542,7 @@ class TestExampleBroker(unittest.TestCase):
def test_get_syncs(self): def test_get_syncs(self):
broker = self.broker_class(':memory:', account='a', container='c') broker = self.broker_class(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp(time.time()), broker.initialize(Timestamp(time.time()).internal,
storage_policy_index=int(self.policy)) storage_policy_index=int(self.policy))
self.assertEqual([], broker.get_syncs()) self.assertEqual([], broker.get_syncs())
broker.merge_syncs([{'sync_point': 1, 'remote_id': 'remote1'}]) broker.merge_syncs([{'sync_point': 1, 'remote_id': 'remote1'}])
@ -557,7 +556,7 @@ class TestExampleBroker(unittest.TestCase):
@with_tempdir @with_tempdir
def test_commit_pending(self, tempdir): def test_commit_pending(self, tempdir):
ts = (normalize_timestamp(t) for t in ts = (Timestamp(t).internal for t in
itertools.count(int(time.time()))) itertools.count(int(time.time())))
broker = self.broker_class(os.path.join(tempdir, 'test.db'), broker = self.broker_class(os.path.join(tempdir, 'test.db'),
account='a', container='c') account='a', container='c')

View File

@ -25,7 +25,7 @@ import mock
from swift.common import direct_client from swift.common import direct_client
from swift.common.exceptions import ClientException from swift.common.exceptions import ClientException
from swift.common.utils import json, normalize_timestamp from swift.common.utils import json, Timestamp
from swift.common.swob import HeaderKeyDict, RESPONSE_REASONS from swift.common.swob import HeaderKeyDict, RESPONSE_REASONS
from swift.common.storage_policy import POLICY_INDEX, POLICIES from swift.common.storage_policy import POLICY_INDEX, POLICIES
@ -116,9 +116,9 @@ class TestDirectClient(unittest.TestCase):
now = time.time() now = time.time()
headers = direct_client.gen_headers(add_ts=True) headers = direct_client.gen_headers(add_ts=True)
self.assertEqual(headers['user-agent'], stub_user_agent) self.assertEqual(headers['user-agent'], stub_user_agent)
self.assert_(now - 1 < float(headers['x-timestamp']) < now + 1) self.assert_(now - 1 < Timestamp(headers['x-timestamp']) < now + 1)
self.assertEqual(headers['x-timestamp'], self.assertEqual(headers['x-timestamp'],
normalize_timestamp(float(headers['x-timestamp']))) Timestamp(headers['x-timestamp']).internal)
self.assertEqual(2, len(headers)) self.assertEqual(2, len(headers))
headers = direct_client.gen_headers(hdrs_in={'foo-bar': '47'}) headers = direct_client.gen_headers(hdrs_in={'foo-bar': '47'})
@ -142,9 +142,9 @@ class TestDirectClient(unittest.TestCase):
expected_header_count += 1 expected_header_count += 1
self.assertEqual( self.assertEqual(
headers['x-timestamp'], headers['x-timestamp'],
normalize_timestamp(float(headers['x-timestamp']))) Timestamp(headers['x-timestamp']).internal)
self.assert_( self.assert_(
now - 1 < float(headers['x-timestamp']) < now + 1) now - 1 < Timestamp(headers['x-timestamp']) < now + 1)
self.assertEqual(expected_header_count, len(headers)) self.assertEqual(expected_header_count, len(headers))
def test_direct_get_account(self): def test_direct_get_account(self):
@ -275,7 +275,7 @@ class TestDirectClient(unittest.TestCase):
self.assert_('HEAD' in str(err)) self.assert_('HEAD' in str(err))
def test_direct_head_container_deleted(self): def test_direct_head_container_deleted(self):
important_timestamp = normalize_timestamp(time.time()) important_timestamp = Timestamp(time.time()).internal
headers = HeaderKeyDict({'X-Backend-Important-Timestamp': headers = HeaderKeyDict({'X-Backend-Important-Timestamp':
important_timestamp}) important_timestamp})
@ -432,7 +432,7 @@ class TestDirectClient(unittest.TestCase):
self.assert_('HEAD' in str(err)) self.assert_('HEAD' in str(err))
def test_direct_head_object_not_found(self): def test_direct_head_object_not_found(self):
important_timestamp = normalize_timestamp(time.time()) important_timestamp = Timestamp(time.time()).internal
stub_headers = {'X-Backend-Important-Timestamp': important_timestamp} stub_headers = {'X-Backend-Important-Timestamp': important_timestamp}
with mocked_http_conn(404, headers=stub_headers) as conn: with mocked_http_conn(404, headers=stub_headers) as conn:
try: try:

View File

@ -23,6 +23,7 @@ from StringIO import StringIO
from urllib import quote from urllib import quote
import swift.common.swob import swift.common.swob
from swift.common import utils, exceptions
class TestHeaderEnvironProxy(unittest.TestCase): class TestHeaderEnvironProxy(unittest.TestCase):
@ -464,6 +465,25 @@ class TestRequest(unittest.TestCase):
self.assertEquals(req.params['a'], 'b') self.assertEquals(req.params['a'], 'b')
self.assertEquals(req.params['c'], 'd') self.assertEquals(req.params['c'], 'd')
def test_timestamp_missing(self):
req = swift.common.swob.Request.blank('/')
self.assertRaises(exceptions.InvalidTimestamp,
getattr, req, 'timestamp')
def test_timestamp_invalid(self):
req = swift.common.swob.Request.blank(
'/', headers={'X-Timestamp': 'asdf'})
self.assertRaises(exceptions.InvalidTimestamp,
getattr, req, 'timestamp')
def test_timestamp(self):
req = swift.common.swob.Request.blank(
'/', headers={'X-Timestamp': '1402447134.13507_00000001'})
expected = utils.Timestamp('1402447134.13507', offset=1)
self.assertEqual(req.timestamp, expected)
self.assertEqual(req.timestamp.normal, expected.normal)
self.assertEqual(req.timestamp.internal, expected.internal)
def test_path(self): def test_path(self):
req = swift.common.swob.Request.blank('/hi?a=b&c=d') req = swift.common.swob.Request.blank('/hi?a=b&c=d')
self.assertEquals(req.path, '/hi') self.assertEquals(req.path, '/hi')

View File

@ -148,6 +148,562 @@ def reset_loggers():
delattr(utils.get_logger, 'console_handler4logger') delattr(utils.get_logger, 'console_handler4logger')
class TestTimestamp(unittest.TestCase):
"""Tests for swift.common.utils.Timestamp"""
def test_invalid_input(self):
    """Constructing a Timestamp with a negative offset raises ValueError."""
    now = time.time()
    self.assertRaises(ValueError, utils.Timestamp, now, offset=-1)
def test_invalid_string_conversion(self):
    """Bare str() on a Timestamp is unsupported (TypeError); callers
    must choose .normal or .internal explicitly."""
    timestamp = utils.Timestamp(time.time())
    self.assertRaises(TypeError, str, timestamp)
def test_normal_format_no_offset(self):
    """Every equivalent representation normalizes to the same
    fixed-width string and compares equal to it."""
    expected = '1402436408.91203'
    test_values = (
        '1402436408.91203',
        '1402436408.91203_00000000',
        '1402436408.912030000',
        '1402436408.912030000_0000000000000',
        '000001402436408.912030000',
        '000001402436408.912030000_0000000000',
        1402436408.91203,
        1402436408.912029,
        1402436408.9120300000000000,
        1402436408.91202999999999999,
        utils.Timestamp(1402436408.91203),
        utils.Timestamp(1402436408.91203, offset=0),
        utils.Timestamp(1402436408.912029),
        utils.Timestamp(1402436408.912029, offset=0),
        utils.Timestamp('1402436408.91203'),
        utils.Timestamp('1402436408.91203', offset=0),
        utils.Timestamp('1402436408.91203_00000000'),
        utils.Timestamp('1402436408.91203_00000000', offset=0),
    )
    for candidate in test_values:
        ts = utils.Timestamp(candidate)
        self.assertEqual(ts.normal, expected)
        # a Timestamp also compares equal to its string and float forms
        self.assertEqual(ts, expected)
        self.assertEqual(ts, float(expected))
        self.assertEqual(ts, utils.normalize_timestamp(expected))
def test_isoformat(self):
    """The isoformat property renders the timestamp portion only;
    the offset (however written) never changes the ISO 8601 string."""
    expected = '2014-06-10T22:47:32.054580'
    # equivalent representations: padded, over-precise, offset-suffixed
    # strings, floats, and Timestamp instances with explicit offsets
    test_values = (
        '1402440452.05458',
        '1402440452.054579',
        '1402440452.05458_00000000',
        '1402440452.054579_00000000',
        '1402440452.054580000',
        '1402440452.054579999',
        '1402440452.054580000_0000000000000',
        '1402440452.054579999_0000ff00',
        '000001402440452.054580000',
        '000001402440452.0545799',
        '000001402440452.054580000_0000000000',
        '000001402440452.054579999999_00000fffff',
        1402440452.05458,
        1402440452.054579,
        1402440452.0545800000000000,
        1402440452.054579999,
        utils.Timestamp(1402440452.05458),
        utils.Timestamp(1402440452.0545799),
        utils.Timestamp(1402440452.05458, offset=0),
        utils.Timestamp(1402440452.05457999999, offset=0),
        utils.Timestamp(1402440452.05458, offset=100),
        utils.Timestamp(1402440452.054579, offset=100),
        utils.Timestamp('1402440452.05458'),
        utils.Timestamp('1402440452.054579999'),
        utils.Timestamp('1402440452.05458', offset=0),
        utils.Timestamp('1402440452.054579', offset=0),
        utils.Timestamp('1402440452.05458', offset=300),
        utils.Timestamp('1402440452.05457999', offset=300),
        utils.Timestamp('1402440452.05458_00000000'),
        utils.Timestamp('1402440452.05457999_00000000'),
        utils.Timestamp('1402440452.05458_00000000', offset=0),
        utils.Timestamp('1402440452.05457999_00000aaa', offset=0),
        utils.Timestamp('1402440452.05458_00000000', offset=400),
        utils.Timestamp('1402440452.054579_0a', offset=400),
    )
    for value in test_values:
        self.assertEqual(utils.Timestamp(value).isoformat, expected)
    # zero timestamps map to the epoch, again regardless of offset
    expected = '1970-01-01T00:00:00.000000'
    test_values = (
        '0',
        '0000000000.00000',
        '0000000000.00000_ffffffffffff',
        0,
        0.0,
    )
    for value in test_values:
        self.assertEqual(utils.Timestamp(value).isoformat, expected)
def test_not_equal(self):
    """Timestamps differing in time or offset compare != an
    internalized reference string."""
    reference = '1402436408.91203_0000000000000001'
    others = (
        utils.Timestamp('1402436408.91203_0000000000000002'),
        utils.Timestamp('1402436408.91203'),
        utils.Timestamp(1402436408.91203),
        utils.Timestamp(1402436408.91204),
        utils.Timestamp(1402436408.91203, offset=0),
        utils.Timestamp(1402436408.91203, offset=2),
    )
    for other in others:
        self.assertTrue(other != reference)
def test_no_force_internal_no_offset(self):
    """Without FORCE_INTERNAL a zero offset is omitted, so internal
    matches the normalized form."""
    cases = (
        (0, '0000000000.00000'),
        (1402437380.58186, '1402437380.58186'),
        (1402437380.581859, '1402437380.58186'),
    )
    with mock.patch('swift.common.utils.FORCE_INTERNAL', new=False):
        for raw, expected in cases:
            self.assertEqual(utils.Timestamp(raw).internal, expected)
        # internal stays backwards compatible with normalize_timestamp
        self.assertEqual(utils.Timestamp(0).internal,
                         utils.normalize_timestamp(0))
def test_no_force_internal_with_offset(self):
    """A significant offset is always rendered in the internal form,
    even without FORCE_INTERNAL."""
    cases = (
        # (input, offset kwarg, expected internal string)
        (0, 1, '0000000000.00000_0000000000000001'),
        (1402437380.58186, 16, '1402437380.58186_0000000000000010'),
        (1402437380.581859, 240, '1402437380.58186_00000000000000f0'),
        # an offset kwarg adds to an offset already in the string
        ('1402437380.581859_00000001', 240,
         '1402437380.58186_00000000000000f1'),
    )
    with mock.patch('swift.common.utils.FORCE_INTERNAL', new=False):
        for raw, offset, expected in cases:
            self.assertEqual(
                utils.Timestamp(raw, offset=offset).internal, expected)
def test_force_internal(self):
    """With FORCE_INTERNAL the offset is rendered even when zero."""
    cases = (
        # (input, offset kwarg, expected internal string)
        (0, 0, '0000000000.00000_0000000000000000'),
        (1402437380.58186, 0, '1402437380.58186_0000000000000000'),
        (1402437380.581859, 0, '1402437380.58186_0000000000000000'),
        (0, 1, '0000000000.00000_0000000000000001'),
        (1402437380.58186, 16, '1402437380.58186_0000000000000010'),
        (1402437380.581859, 16, '1402437380.58186_0000000000000010'),
    )
    with mock.patch('swift.common.utils.FORCE_INTERNAL', new=True):
        for raw, offset, expected in cases:
            self.assertEqual(
                utils.Timestamp(raw, offset=offset).internal, expected)
def test_internal_format_no_offset(self):
    """A Timestamp with an insignificant (zero) offset still compares
    equal to the fully internalized string, but only renders the
    offset portion when FORCE_INTERNAL is set."""
    expected = '1402436408.91203_0000000000000000'
    # equivalent representations with no significant offset
    test_values = (
        '1402436408.91203',
        '1402436408.91203_00000000',
        '1402436408.912030000',
        '1402436408.912030000_0000000000000',
        '000001402436408.912030000',
        '000001402436408.912030000_0000000000',
        1402436408.91203,
        1402436408.9120300000000000,
        1402436408.912029,
        1402436408.912029999999999999,
        utils.Timestamp(1402436408.91203),
        utils.Timestamp(1402436408.91203, offset=0),
        utils.Timestamp(1402436408.912029),
        utils.Timestamp(1402436408.91202999999999999, offset=0),
        utils.Timestamp('1402436408.91203'),
        utils.Timestamp('1402436408.91203', offset=0),
        utils.Timestamp('1402436408.912029'),
        utils.Timestamp('1402436408.912029', offset=0),
        utils.Timestamp('1402436408.912029999999999'),
        utils.Timestamp('1402436408.912029999999999', offset=0),
    )
    for value in test_values:
        # timestamp instance is always equivalent
        self.assertEqual(utils.Timestamp(value), expected)
        if utils.FORCE_INTERNAL:
            # the FORCE_INTERNAL flag makes the internal format always
            # include the offset portion of the timestamp even when it's
            # not significant and would be bad during upgrades
            self.assertEqual(utils.Timestamp(value).internal, expected)
        else:
            # unless we FORCE_INTERNAL, when there's no offset the
            # internal format is equivalent to the normalized format
            self.assertEqual(utils.Timestamp(value).internal,
                             '1402436408.91203')
def test_internal_format_with_offset(self):
    """A significant offset is always part of the internal form, and a
    Timestamp with an offset sorts after its own normalized form."""
    expected = '1402436408.91203_00000000000000f0'
    # equivalent representations: offsets given in the string suffix,
    # via the offset kwarg, or split between the two (they add up)
    test_values = (
        '1402436408.91203_000000f0',
        '1402436408.912030000_0000000000f0',
        '1402436408.912029_000000f0',
        '1402436408.91202999999_0000000000f0',
        '000001402436408.912030000_000000000f0',
        '000001402436408.9120299999_000000000f0',
        utils.Timestamp(1402436408.91203, offset=240),
        utils.Timestamp(1402436408.912029, offset=240),
        utils.Timestamp('1402436408.91203', offset=240),
        utils.Timestamp('1402436408.91203_00000000', offset=240),
        utils.Timestamp('1402436408.91203_0000000f', offset=225),
        utils.Timestamp('1402436408.9120299999', offset=240),
        utils.Timestamp('1402436408.9120299999_00000000', offset=240),
        utils.Timestamp('1402436408.9120299999_00000010', offset=224),
    )
    for value in test_values:
        timestamp = utils.Timestamp(value)
        self.assertEqual(timestamp.internal, expected)
        # can compare with offset if the string is internalized
        self.assertEqual(timestamp, expected)
        # if comparison value only includes the normalized portion and the
        # timestamp includes an offset, it is considered greater
        normal = utils.Timestamp(expected).normal
        self.assertTrue(timestamp > normal,
                        '%r is not bigger than %r given %r' % (
                            timestamp, normal, value))
        self.assertTrue(timestamp > float(normal),
                        '%r is not bigger than %f given %r' % (
                            timestamp, float(normal), value))
def test_int(self):
    """int() truncates a Timestamp to whole seconds; the full timestamp
    always compares greater than its truncated integer value."""
    expected = 1402437965
    # equivalent representations, with and without offsets
    test_values = (
        '1402437965.91203',
        '1402437965.91203_00000000',
        '1402437965.912030000',
        '1402437965.912030000_0000000000000',
        '000001402437965.912030000',
        '000001402437965.912030000_0000000000',
        1402437965.91203,
        1402437965.9120300000000000,
        1402437965.912029,
        1402437965.912029999999999999,
        utils.Timestamp(1402437965.91203),
        utils.Timestamp(1402437965.91203, offset=0),
        utils.Timestamp(1402437965.91203, offset=500),
        utils.Timestamp(1402437965.912029),
        utils.Timestamp(1402437965.91202999999999999, offset=0),
        utils.Timestamp(1402437965.91202999999999999, offset=300),
        utils.Timestamp('1402437965.91203'),
        utils.Timestamp('1402437965.91203', offset=0),
        utils.Timestamp('1402437965.91203', offset=400),
        utils.Timestamp('1402437965.912029'),
        utils.Timestamp('1402437965.912029', offset=0),
        utils.Timestamp('1402437965.912029', offset=200),
        utils.Timestamp('1402437965.912029999999999'),
        utils.Timestamp('1402437965.912029999999999', offset=0),
        utils.Timestamp('1402437965.912029999999999', offset=100),
    )
    for value in test_values:
        timestamp = utils.Timestamp(value)
        self.assertEqual(int(timestamp), expected)
        # the sub-second part makes the timestamp sort after its
        # integer truncation
        self.assertTrue(timestamp > expected)
def test_float(self):
    """float() of a Timestamp recovers the timestamp portion.

    The recovered value is within normalized precision (5 decimal
    places) of the original for every input representation; offsets
    never leak into the float value.  Comparison against floats and
    float-formatted strings behaves the same as comparing the
    converted value.
    """
    target = 1402438115.91203
    candidates = (
        '1402438115.91203',
        '1402438115.91203_00000000',
        '1402438115.912030000',
        '1402438115.912030000_0000000000000',
        '000001402438115.912030000',
        '000001402438115.912030000_0000000000',
        1402438115.91203,
        1402438115.9120300000000000,
        1402438115.912029,
        1402438115.912029999999999999,
        utils.Timestamp(1402438115.91203),
        utils.Timestamp(1402438115.91203, offset=0),
        utils.Timestamp(1402438115.91203, offset=500),
        utils.Timestamp(1402438115.912029),
        utils.Timestamp(1402438115.91202999999999999, offset=0),
        utils.Timestamp(1402438115.91202999999999999, offset=300),
        utils.Timestamp('1402438115.91203'),
        utils.Timestamp('1402438115.91203', offset=0),
        utils.Timestamp('1402438115.91203', offset=400),
        utils.Timestamp('1402438115.912029'),
        utils.Timestamp('1402438115.912029', offset=0),
        utils.Timestamp('1402438115.912029', offset=200),
        utils.Timestamp('1402438115.912029999999999'),
        utils.Timestamp('1402438115.912029999999999', offset=0),
        utils.Timestamp('1402438115.912029999999999', offset=100),
    )
    # normalized form keeps only 5 decimal places, so allow some slop
    tolerance = 0.00001
    minimum = target - tolerance
    maximum = target + tolerance
    for candidate in candidates:
        ts = utils.Timestamp(candidate)
        # explicit float() conversion lands inside the tolerance band
        self.assertTrue(float(ts) > minimum,
                        '%f is not bigger than %f given %r' % (
                            ts, minimum, candidate))
        self.assertTrue(float(ts) < maximum,
                        '%f is not smaller than %f given %r' % (
                            ts, maximum, candidate))
        # comparing the Timestamp itself against floats works too
        self.assertTrue(ts > minimum,
                        '%s is not bigger than %f given %r' % (
                            ts.normal, minimum, candidate))
        self.assertTrue(ts < maximum,
                        '%s is not smaller than %f given %r' % (
                            ts.normal, maximum, candidate))
        # ... and against float-formatted strings
        self.assertTrue(ts > '%f' % minimum,
                        '%s is not bigger than %s given %r' % (
                            ts.normal, minimum, candidate))
        self.assertTrue(ts < '%f' % maximum,
                        '%s is not smaller than %s given %r' % (
                            ts.normal, maximum, candidate))
def test_false(self):
    """A zero Timestamp with no significant offset is falsy."""
    zero_forms = (0, '0', 0.0, '0.0',
                  00000000.00000000, '00000000.00000000')
    for zero in zero_forms:
        # falsy whether the (insignificant) zero offset is implied
        # or passed explicitly
        self.assertFalse(utils.Timestamp(zero))
        self.assertFalse(utils.Timestamp(zero, offset=0))
def test_true(self):
    """Any non-zero timestamp - or any non-zero offset - is truthy."""
    nonzero_forms = (1, '1', 1.1, '1.1',
                     11111111.11111111, '11111111.11111111')
    for value in nonzero_forms:
        self.assertTrue(utils.Timestamp(value))
        self.assertTrue(utils.Timestamp(value, offset=1))
    # even a zero timestamp becomes truthy once it carries an offset
    zero_forms = (0, '0', 0.0, '0.0',
                  00000000.00000000, '00000000.00000000')
    for zero in zero_forms:
        self.assertTrue(utils.Timestamp(zero, offset=1))
def test_greater_no_offset(self):
    """A Timestamp sorts after every older value.

    The comparison holds against the raw input and against every
    alternate representation of the older value: the parsed
    Timestamp, its normalized and internalized strings (including
    internalized forms that carry an offset), and its float and int
    conversions.
    """
    now = time.time()
    older = now - 1
    timestamp = utils.Timestamp(now)
    test_values = (
        0, '0', 0.0, '0.0', '0000.0000', '000.000_000',
        1, '1', 1.1, '1.1', '1111.1111', '111.111_111',
        1402443112.213252, '1402443112.213252', '1402443112.213252_ffff',
        older, '%f' % older, '%f_0000ffff' % older,
    )
    for value in test_values:
        other = utils.Timestamp(value)
        self.assertNotEqual(timestamp, other)  # sanity
        for rhs in (value, other, other.normal, other.internal,
                    float(other), int(other)):
            self.assertTrue(timestamp > rhs,
                            '%r is not greater than %r given %r' % (
                                timestamp, rhs, value))
def test_greater_with_offset(self):
    """An offset makes a Timestamp sort after its own base value.

    For a range of offsets, the offset Timestamp compares greater
    than all older values and also greater than ``now`` itself in
    every representation - including the internalized zero-offset
    form of ``now``.
    """
    now = time.time()
    older = now - 1
    test_values = (
        0, '0', 0.0, '0.0', '0000.0000', '000.000_000',
        1, '1', 1.1, '1.1', '1111.1111', '111.111_111',
        1402443346.935174, '1402443346.93517', '1402443346.935169_ffff',
        older, '%f' % older, '%f_0000ffff' % older,
        now, '%f' % now, '%f_00000000' % now,
    )
    for offset in range(1, 1000, 100):
        timestamp = utils.Timestamp(now, offset=offset)
        for value in test_values:
            other = utils.Timestamp(value)
            self.assertNotEqual(timestamp, other)  # sanity
            for rhs in (value, other, other.normal, other.internal,
                        float(other), int(other)):
                self.assertTrue(timestamp > rhs,
                                '%r is not greater than %r given %r' % (
                                    timestamp, rhs, value))
def test_smaller_no_offset(self):
    """A Timestamp sorts before every newer value.

    Mirror image of test_greater_no_offset: the comparison holds
    against the raw input and every alternate representation of the
    newer value.
    """
    now = time.time()
    newer = now + 1
    timestamp = utils.Timestamp(now)
    test_values = (
        9999999999.99999, '9999999999.99999', '9999999999.99999_ffff',
        newer, '%f' % newer, '%f_0000ffff' % newer,
    )
    for value in test_values:
        other = utils.Timestamp(value)
        self.assertNotEqual(timestamp, other)  # sanity
        for rhs in (value, other, other.normal, other.internal,
                    float(other), int(other)):
            self.assertTrue(timestamp < rhs,
                            '%r is not smaller than %r given %r' % (
                                timestamp, rhs, value))
def test_smaller_with_offset(self):
    """An offset never promotes a Timestamp past a newer timestamp.

    For a range of offsets, the offset Timestamp still compares
    smaller than any value with a greater timestamp portion, in
    every representation.
    """
    now = time.time()
    newer = now + 1
    test_values = (
        9999999999.99999, '9999999999.99999', '9999999999.99999_ffff',
        newer, '%f' % newer, '%f_0000ffff' % newer,
    )
    for offset in range(1, 1000, 100):
        timestamp = utils.Timestamp(now, offset=offset)
        for value in test_values:
            other = utils.Timestamp(value)
            self.assertNotEqual(timestamp, other)  # sanity
            for rhs in (value, other, other.normal, other.internal,
                        float(other), int(other)):
                self.assertTrue(timestamp < rhs,
                                '%r is not smaller than %r given %r' % (
                                    timestamp, rhs, value))
def test_ordering(self):
    """Internalized timestamps have a total order.

    Timestamps with an equal timestamp portion order by offset, and
    a greater timestamp portion always wins regardless of offset.
    The order is the same whether sorting Timestamp objects or
    sorting their internalized strings lexicographically.
    """
    given = [
        '1402444820.62590_000000000000000a',
        '1402444820.62589_0000000000000001',
        '1402444821.52589_0000000000000004',
        '1402444920.62589_0000000000000004',
        '1402444821.62589_000000000000000a',
        '1402444821.72589_000000000000000a',
        '1402444920.62589_0000000000000002',
        '1402444820.62589_0000000000000002',
        '1402444820.62589_000000000000000a',
        '1402444820.62590_0000000000000004',
        '1402444920.62589_000000000000000a',
        '1402444820.62590_0000000000000002',
        '1402444821.52589_0000000000000002',
        '1402444821.52589_0000000000000000',
        '1402444920.62589',
        '1402444821.62589_0000000000000004',
        '1402444821.72589_0000000000000001',
        '1402444820.62590',
        '1402444820.62590_0000000000000001',
        '1402444820.62589_0000000000000004',
        '1402444821.72589_0000000000000000',
        '1402444821.52589_000000000000000a',
        '1402444821.72589_0000000000000004',
        '1402444821.62589',
        '1402444821.52589_0000000000000001',
        '1402444821.62589_0000000000000001',
        '1402444821.62589_0000000000000002',
        '1402444821.72589_0000000000000002',
        '1402444820.62589',
        '1402444920.62589_0000000000000001']
    expected = [
        '1402444820.62589',
        '1402444820.62589_0000000000000001',
        '1402444820.62589_0000000000000002',
        '1402444820.62589_0000000000000004',
        '1402444820.62589_000000000000000a',
        '1402444820.62590',
        '1402444820.62590_0000000000000001',
        '1402444820.62590_0000000000000002',
        '1402444820.62590_0000000000000004',
        '1402444820.62590_000000000000000a',
        '1402444821.52589',
        '1402444821.52589_0000000000000001',
        '1402444821.52589_0000000000000002',
        '1402444821.52589_0000000000000004',
        '1402444821.52589_000000000000000a',
        '1402444821.62589',
        '1402444821.62589_0000000000000001',
        '1402444821.62589_0000000000000002',
        '1402444821.62589_0000000000000004',
        '1402444821.62589_000000000000000a',
        '1402444821.72589',
        '1402444821.72589_0000000000000001',
        '1402444821.72589_0000000000000002',
        '1402444821.72589_0000000000000004',
        '1402444821.72589_000000000000000a',
        '1402444920.62589',
        '1402444920.62589_0000000000000001',
        '1402444920.62589_0000000000000002',
        '1402444920.62589_0000000000000004',
        '1402444920.62589_000000000000000a',
    ]
    # less visual version
    """
    now = time.time()
    given = [
        utils.Timestamp(now + i, offset=offset).internal
        for i in (0, 0.00001, 0.9, 1.0, 1.1, 100.0)
        for offset in (0, 1, 2, 4, 10)
    ]
    expected = [t for t in given]
    random.shuffle(given)
    """
    self.assertEqual(len(given), len(expected))  # sanity
    parsed = [utils.Timestamp(t) for t in given]
    # the expected values omit insignificant (zero) offsets, so
    # disable forced-internal formatting while comparing
    with mock.patch('swift.common.utils.FORCE_INTERNAL', new=False):
        # sorting the Timestamp objects yields the expected order ...
        self.assertEqual(
            [t.internal for t in sorted(parsed)], expected)
        # ... and so does plain lexicographic sort of the strings
        self.assertEqual(
            sorted([t.internal for t in parsed]), expected)
class TestUtils(unittest.TestCase): class TestUtils(unittest.TestCase):
"""Tests for swift.common.utils """ """Tests for swift.common.utils """

View File

@ -28,7 +28,7 @@ import sqlite3
import pickle import pickle
from swift.container.backend import ContainerBroker from swift.container.backend import ContainerBroker
from swift.common.utils import normalize_timestamp from swift.common.utils import Timestamp
from swift.common.storage_policy import POLICIES from swift.common.storage_policy import POLICIES
import mock import mock
@ -44,7 +44,7 @@ class TestContainerBroker(unittest.TestCase):
# Test ContainerBroker.__init__ # Test ContainerBroker.__init__
broker = ContainerBroker(':memory:', account='a', container='c') broker = ContainerBroker(':memory:', account='a', container='c')
self.assertEqual(broker.db_file, ':memory:') self.assertEqual(broker.db_file, ':memory:')
broker.initialize(normalize_timestamp('1'), 0) broker.initialize(Timestamp('1').internal, 0)
with broker.get() as conn: with broker.get() as conn:
curs = conn.cursor() curs = conn.cursor()
curs.execute('SELECT 1') curs.execute('SELECT 1')
@ -52,11 +52,11 @@ class TestContainerBroker(unittest.TestCase):
@patch_policies @patch_policies
def test_storage_policy_property(self): def test_storage_policy_property(self):
ts = itertools.count(1) ts = (Timestamp(t).internal for t in itertools.count(int(time())))
for policy in POLICIES: for policy in POLICIES:
broker = ContainerBroker(':memory:', account='a', broker = ContainerBroker(':memory:', account='a',
container='policy_%s' % policy.name) container='policy_%s' % policy.name)
broker.initialize(normalize_timestamp(ts.next()), policy.idx) broker.initialize(ts.next(), policy.idx)
with broker.get() as conn: with broker.get() as conn:
try: try:
conn.execute('''SELECT storage_policy_index conn.execute('''SELECT storage_policy_index
@ -78,7 +78,7 @@ class TestContainerBroker(unittest.TestCase):
# unhandled exception # unhandled exception
first_conn = None first_conn = None
broker = ContainerBroker(':memory:', account='a', container='c') broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'), 0) broker.initialize(Timestamp('1').internal, 0)
with broker.get() as conn: with broker.get() as conn:
first_conn = conn first_conn = conn
try: try:
@ -92,20 +92,20 @@ class TestContainerBroker(unittest.TestCase):
def test_empty(self): def test_empty(self):
# Test ContainerBroker.empty # Test ContainerBroker.empty
broker = ContainerBroker(':memory:', account='a', container='c') broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'), 0) broker.initialize(Timestamp('1').internal, 0)
self.assert_(broker.empty()) self.assert_(broker.empty())
broker.put_object('o', normalize_timestamp(time()), 0, 'text/plain', broker.put_object('o', Timestamp(time()).internal, 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e') 'd41d8cd98f00b204e9800998ecf8427e')
self.assert_(not broker.empty()) self.assert_(not broker.empty())
sleep(.00001) sleep(.00001)
broker.delete_object('o', normalize_timestamp(time())) broker.delete_object('o', Timestamp(time()).internal)
self.assert_(broker.empty()) self.assert_(broker.empty())
def test_reclaim(self): def test_reclaim(self):
broker = ContainerBroker(':memory:', account='test_account', broker = ContainerBroker(':memory:', account='test_account',
container='test_container') container='test_container')
broker.initialize(normalize_timestamp('1'), 0) broker.initialize(Timestamp('1').internal, 0)
broker.put_object('o', normalize_timestamp(time()), 0, 'text/plain', broker.put_object('o', Timestamp(time()).internal, 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e') 'd41d8cd98f00b204e9800998ecf8427e')
with broker.get() as conn: with broker.get() as conn:
self.assertEquals(conn.execute( self.assertEquals(conn.execute(
@ -114,7 +114,7 @@ class TestContainerBroker(unittest.TestCase):
self.assertEquals(conn.execute( self.assertEquals(conn.execute(
"SELECT count(*) FROM object " "SELECT count(*) FROM object "
"WHERE deleted = 1").fetchone()[0], 0) "WHERE deleted = 1").fetchone()[0], 0)
broker.reclaim(normalize_timestamp(time() - 999), time()) broker.reclaim(Timestamp(time() - 999).internal, time())
with broker.get() as conn: with broker.get() as conn:
self.assertEquals(conn.execute( self.assertEquals(conn.execute(
"SELECT count(*) FROM object " "SELECT count(*) FROM object "
@ -123,7 +123,7 @@ class TestContainerBroker(unittest.TestCase):
"SELECT count(*) FROM object " "SELECT count(*) FROM object "
"WHERE deleted = 1").fetchone()[0], 0) "WHERE deleted = 1").fetchone()[0], 0)
sleep(.00001) sleep(.00001)
broker.delete_object('o', normalize_timestamp(time())) broker.delete_object('o', Timestamp(time()).internal)
with broker.get() as conn: with broker.get() as conn:
self.assertEquals(conn.execute( self.assertEquals(conn.execute(
"SELECT count(*) FROM object " "SELECT count(*) FROM object "
@ -131,7 +131,7 @@ class TestContainerBroker(unittest.TestCase):
self.assertEquals(conn.execute( self.assertEquals(conn.execute(
"SELECT count(*) FROM object " "SELECT count(*) FROM object "
"WHERE deleted = 1").fetchone()[0], 1) "WHERE deleted = 1").fetchone()[0], 1)
broker.reclaim(normalize_timestamp(time() - 999), time()) broker.reclaim(Timestamp(time() - 999).internal, time())
with broker.get() as conn: with broker.get() as conn:
self.assertEquals(conn.execute( self.assertEquals(conn.execute(
"SELECT count(*) FROM object " "SELECT count(*) FROM object "
@ -140,7 +140,7 @@ class TestContainerBroker(unittest.TestCase):
"SELECT count(*) FROM object " "SELECT count(*) FROM object "
"WHERE deleted = 1").fetchone()[0], 1) "WHERE deleted = 1").fetchone()[0], 1)
sleep(.00001) sleep(.00001)
broker.reclaim(normalize_timestamp(time()), time()) broker.reclaim(Timestamp(time()).internal, time())
with broker.get() as conn: with broker.get() as conn:
self.assertEquals(conn.execute( self.assertEquals(conn.execute(
"SELECT count(*) FROM object " "SELECT count(*) FROM object "
@ -149,21 +149,21 @@ class TestContainerBroker(unittest.TestCase):
"SELECT count(*) FROM object " "SELECT count(*) FROM object "
"WHERE deleted = 1").fetchone()[0], 0) "WHERE deleted = 1").fetchone()[0], 0)
# Test the return values of reclaim() # Test the return values of reclaim()
broker.put_object('w', normalize_timestamp(time()), 0, 'text/plain', broker.put_object('w', Timestamp(time()).internal, 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e') 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('x', normalize_timestamp(time()), 0, 'text/plain', broker.put_object('x', Timestamp(time()).internal, 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e') 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('y', normalize_timestamp(time()), 0, 'text/plain', broker.put_object('y', Timestamp(time()).internal, 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e') 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('z', normalize_timestamp(time()), 0, 'text/plain', broker.put_object('z', Timestamp(time()).internal, 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e') 'd41d8cd98f00b204e9800998ecf8427e')
# Test before deletion # Test before deletion
broker.reclaim(normalize_timestamp(time()), time()) broker.reclaim(Timestamp(time()).internal, time())
broker.delete_db(normalize_timestamp(time())) broker.delete_db(Timestamp(time()).internal)
def test_get_info_is_deleted(self): def test_get_info_is_deleted(self):
start = int(time()) start = int(time())
ts = (normalize_timestamp(t) for t in itertools.count(start)) ts = (Timestamp(t).internal for t in itertools.count(start))
broker = ContainerBroker(':memory:', account='test_account', broker = ContainerBroker(':memory:', account='test_account',
container='test_container') container='test_container')
# create it # create it
@ -172,8 +172,8 @@ class TestContainerBroker(unittest.TestCase):
self.assertEqual(is_deleted, broker.is_deleted()) self.assertEqual(is_deleted, broker.is_deleted())
self.assertEqual(is_deleted, False) # sanity self.assertEqual(is_deleted, False) # sanity
self.assertEqual(info, broker.get_info()) self.assertEqual(info, broker.get_info())
self.assertEqual(info['put_timestamp'], normalize_timestamp(start)) self.assertEqual(info['put_timestamp'], Timestamp(start).internal)
self.assert_(float(info['created_at']) >= start) self.assert_(Timestamp(info['created_at']) >= start)
self.assertEqual(info['delete_timestamp'], '0') self.assertEqual(info['delete_timestamp'], '0')
if self.__class__ in (TestContainerBrokerBeforeMetadata, if self.__class__ in (TestContainerBrokerBeforeMetadata,
TestContainerBrokerBeforeXSync, TestContainerBrokerBeforeXSync,
@ -181,7 +181,7 @@ class TestContainerBroker(unittest.TestCase):
self.assertEqual(info['status_changed_at'], '0') self.assertEqual(info['status_changed_at'], '0')
else: else:
self.assertEqual(info['status_changed_at'], self.assertEqual(info['status_changed_at'],
normalize_timestamp(start)) Timestamp(start).internal)
# delete it # delete it
delete_timestamp = ts.next() delete_timestamp = ts.next()
@ -190,8 +190,8 @@ class TestContainerBroker(unittest.TestCase):
self.assertEqual(is_deleted, True) # sanity self.assertEqual(is_deleted, True) # sanity
self.assertEqual(is_deleted, broker.is_deleted()) self.assertEqual(is_deleted, broker.is_deleted())
self.assertEqual(info, broker.get_info()) self.assertEqual(info, broker.get_info())
self.assertEqual(info['put_timestamp'], normalize_timestamp(start)) self.assertEqual(info['put_timestamp'], Timestamp(start).internal)
self.assert_(float(info['created_at']) >= start) self.assert_(Timestamp(info['created_at']) >= start)
self.assertEqual(info['delete_timestamp'], delete_timestamp) self.assertEqual(info['delete_timestamp'], delete_timestamp)
self.assertEqual(info['status_changed_at'], delete_timestamp) self.assertEqual(info['status_changed_at'], delete_timestamp)
@ -202,16 +202,16 @@ class TestContainerBroker(unittest.TestCase):
self.assertEqual(is_deleted, False) # sanity self.assertEqual(is_deleted, False) # sanity
self.assertEqual(is_deleted, broker.is_deleted()) self.assertEqual(is_deleted, broker.is_deleted())
self.assertEqual(info, broker.get_info()) self.assertEqual(info, broker.get_info())
self.assertEqual(info['put_timestamp'], normalize_timestamp(start)) self.assertEqual(info['put_timestamp'], Timestamp(start).internal)
self.assert_(float(info['created_at']) >= start) self.assert_(Timestamp(info['created_at']) >= start)
self.assertEqual(info['delete_timestamp'], delete_timestamp) self.assertEqual(info['delete_timestamp'], delete_timestamp)
self.assertEqual(info['status_changed_at'], delete_timestamp) self.assertEqual(info['status_changed_at'], delete_timestamp)
def test_delete_object(self): def test_delete_object(self):
# Test ContainerBroker.delete_object # Test ContainerBroker.delete_object
broker = ContainerBroker(':memory:', account='a', container='c') broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'), 0) broker.initialize(Timestamp('1').internal, 0)
broker.put_object('o', normalize_timestamp(time()), 0, 'text/plain', broker.put_object('o', Timestamp(time()).internal, 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e') 'd41d8cd98f00b204e9800998ecf8427e')
with broker.get() as conn: with broker.get() as conn:
self.assertEquals(conn.execute( self.assertEquals(conn.execute(
@ -221,7 +221,7 @@ class TestContainerBroker(unittest.TestCase):
"SELECT count(*) FROM object " "SELECT count(*) FROM object "
"WHERE deleted = 1").fetchone()[0], 0) "WHERE deleted = 1").fetchone()[0], 0)
sleep(.00001) sleep(.00001)
broker.delete_object('o', normalize_timestamp(time())) broker.delete_object('o', Timestamp(time()).internal)
with broker.get() as conn: with broker.get() as conn:
self.assertEquals(conn.execute( self.assertEquals(conn.execute(
"SELECT count(*) FROM object " "SELECT count(*) FROM object "
@ -233,10 +233,10 @@ class TestContainerBroker(unittest.TestCase):
def test_put_object(self): def test_put_object(self):
# Test ContainerBroker.put_object # Test ContainerBroker.put_object
broker = ContainerBroker(':memory:', account='a', container='c') broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'), 0) broker.initialize(Timestamp('1').internal, 0)
# Create initial object # Create initial object
timestamp = normalize_timestamp(time()) timestamp = Timestamp(time()).internal
broker.put_object('"{<object \'&\' name>}"', timestamp, 123, broker.put_object('"{<object \'&\' name>}"', timestamp, 123,
'application/x-test', 'application/x-test',
'5af83e3196bf99f440f31f2e1a6c9afe') '5af83e3196bf99f440f31f2e1a6c9afe')
@ -280,7 +280,7 @@ class TestContainerBroker(unittest.TestCase):
# Put new event # Put new event
sleep(.00001) sleep(.00001)
timestamp = normalize_timestamp(time()) timestamp = Timestamp(time()).internal
broker.put_object('"{<object \'&\' name>}"', timestamp, 124, broker.put_object('"{<object \'&\' name>}"', timestamp, 124,
'application/x-test', 'application/x-test',
'aa0749bacbc79ec65fe206943d8fe449') 'aa0749bacbc79ec65fe206943d8fe449')
@ -302,7 +302,7 @@ class TestContainerBroker(unittest.TestCase):
"SELECT deleted FROM object").fetchone()[0], 0) "SELECT deleted FROM object").fetchone()[0], 0)
# Put old event # Put old event
otimestamp = normalize_timestamp(float(timestamp) - 1) otimestamp = Timestamp(float(Timestamp(timestamp)) - 1).internal
broker.put_object('"{<object \'&\' name>}"', otimestamp, 124, broker.put_object('"{<object \'&\' name>}"', otimestamp, 124,
'application/x-test', 'application/x-test',
'aa0749bacbc79ec65fe206943d8fe449') 'aa0749bacbc79ec65fe206943d8fe449')
@ -324,7 +324,7 @@ class TestContainerBroker(unittest.TestCase):
"SELECT deleted FROM object").fetchone()[0], 0) "SELECT deleted FROM object").fetchone()[0], 0)
# Put old delete event # Put old delete event
dtimestamp = normalize_timestamp(float(timestamp) - 1) dtimestamp = Timestamp(float(Timestamp(timestamp)) - 1).internal
broker.put_object('"{<object \'&\' name>}"', dtimestamp, 0, '', '', broker.put_object('"{<object \'&\' name>}"', dtimestamp, 0, '', '',
deleted=1) deleted=1)
with broker.get() as conn: with broker.get() as conn:
@ -346,7 +346,7 @@ class TestContainerBroker(unittest.TestCase):
# Put new delete event # Put new delete event
sleep(.00001) sleep(.00001)
timestamp = normalize_timestamp(time()) timestamp = Timestamp(time()).internal
broker.put_object('"{<object \'&\' name>}"', timestamp, 0, '', '', broker.put_object('"{<object \'&\' name>}"', timestamp, 0, '', '',
deleted=1) deleted=1)
with broker.get() as conn: with broker.get() as conn:
@ -360,7 +360,7 @@ class TestContainerBroker(unittest.TestCase):
# Put new event # Put new event
sleep(.00001) sleep(.00001)
timestamp = normalize_timestamp(time()) timestamp = Timestamp(time()).internal
broker.put_object('"{<object \'&\' name>}"', timestamp, 123, broker.put_object('"{<object \'&\' name>}"', timestamp, 123,
'application/x-test', 'application/x-test',
'5af83e3196bf99f440f31f2e1a6c9afe') '5af83e3196bf99f440f31f2e1a6c9afe')
@ -383,12 +383,12 @@ class TestContainerBroker(unittest.TestCase):
# We'll use this later # We'll use this later
sleep(.0001) sleep(.0001)
in_between_timestamp = normalize_timestamp(time()) in_between_timestamp = Timestamp(time()).internal
# New post event # New post event
sleep(.0001) sleep(.0001)
previous_timestamp = timestamp previous_timestamp = timestamp
timestamp = normalize_timestamp(time()) timestamp = Timestamp(time()).internal
with broker.get() as conn: with broker.get() as conn:
self.assertEquals(conn.execute( self.assertEquals(conn.execute(
"SELECT name FROM object").fetchone()[0], "SELECT name FROM object").fetchone()[0],
@ -432,7 +432,7 @@ class TestContainerBroker(unittest.TestCase):
@patch_policies @patch_policies
def test_put_misplaced_object_does_not_effect_container_stats(self): def test_put_misplaced_object_does_not_effect_container_stats(self):
policy = random.choice(list(POLICIES)) policy = random.choice(list(POLICIES))
ts = (normalize_timestamp(t) for t in ts = (Timestamp(t).internal for t in
itertools.count(int(time()))) itertools.count(int(time())))
broker = ContainerBroker(':memory:', broker = ContainerBroker(':memory:',
account='a', container='c') account='a', container='c')
@ -460,7 +460,7 @@ class TestContainerBroker(unittest.TestCase):
@patch_policies @patch_policies
def test_has_multiple_policies(self): def test_has_multiple_policies(self):
policy = random.choice(list(POLICIES)) policy = random.choice(list(POLICIES))
ts = (normalize_timestamp(t) for t in ts = (Timestamp(t).internal for t in
itertools.count(int(time()))) itertools.count(int(time())))
broker = ContainerBroker(':memory:', broker = ContainerBroker(':memory:',
account='a', container='c') account='a', container='c')
@ -484,7 +484,7 @@ class TestContainerBroker(unittest.TestCase):
@patch_policies @patch_policies
def test_get_policy_info(self): def test_get_policy_info(self):
policy = random.choice(list(POLICIES)) policy = random.choice(list(POLICIES))
ts = (normalize_timestamp(t) for t in ts = (Timestamp(t).internal for t in
itertools.count(int(time()))) itertools.count(int(time())))
broker = ContainerBroker(':memory:', broker = ContainerBroker(':memory:',
account='a', container='c') account='a', container='c')
@ -521,7 +521,7 @@ class TestContainerBroker(unittest.TestCase):
self.assertEqual(policy_stats, expected) self.assertEqual(policy_stats, expected)
def test_policy_stat_tracking(self): def test_policy_stat_tracking(self):
ts = (normalize_timestamp(t) for t in ts = (Timestamp(t).internal for t in
itertools.count(int(time()))) itertools.count(int(time())))
broker = ContainerBroker(':memory:', broker = ContainerBroker(':memory:',
account='a', container='c') account='a', container='c')
@ -558,13 +558,13 @@ class TestContainerBroker(unittest.TestCase):
# Test ContainerBroker.get_info # Test ContainerBroker.get_info
broker = ContainerBroker(':memory:', account='test1', broker = ContainerBroker(':memory:', account='test1',
container='test2') container='test2')
broker.initialize(normalize_timestamp('1'), 0) broker.initialize(Timestamp('1').internal, 0)
info = broker.get_info() info = broker.get_info()
self.assertEquals(info['account'], 'test1') self.assertEquals(info['account'], 'test1')
self.assertEquals(info['container'], 'test2') self.assertEquals(info['container'], 'test2')
self.assertEquals(info['hash'], '00000000000000000000000000000000') self.assertEquals(info['hash'], '00000000000000000000000000000000')
self.assertEqual(info['put_timestamp'], normalize_timestamp(1)) self.assertEqual(info['put_timestamp'], Timestamp(1).internal)
self.assertEqual(info['delete_timestamp'], '0') self.assertEqual(info['delete_timestamp'], '0')
if self.__class__ in (TestContainerBrokerBeforeMetadata, if self.__class__ in (TestContainerBrokerBeforeMetadata,
TestContainerBrokerBeforeXSync, TestContainerBrokerBeforeXSync,
@ -572,40 +572,40 @@ class TestContainerBroker(unittest.TestCase):
self.assertEqual(info['status_changed_at'], '0') self.assertEqual(info['status_changed_at'], '0')
else: else:
self.assertEqual(info['status_changed_at'], self.assertEqual(info['status_changed_at'],
normalize_timestamp(1)) Timestamp(1).internal)
info = broker.get_info() info = broker.get_info()
self.assertEquals(info['object_count'], 0) self.assertEquals(info['object_count'], 0)
self.assertEquals(info['bytes_used'], 0) self.assertEquals(info['bytes_used'], 0)
broker.put_object('o1', normalize_timestamp(time()), 123, 'text/plain', broker.put_object('o1', Timestamp(time()).internal, 123, 'text/plain',
'5af83e3196bf99f440f31f2e1a6c9afe') '5af83e3196bf99f440f31f2e1a6c9afe')
info = broker.get_info() info = broker.get_info()
self.assertEquals(info['object_count'], 1) self.assertEquals(info['object_count'], 1)
self.assertEquals(info['bytes_used'], 123) self.assertEquals(info['bytes_used'], 123)
sleep(.00001) sleep(.00001)
broker.put_object('o2', normalize_timestamp(time()), 123, 'text/plain', broker.put_object('o2', Timestamp(time()).internal, 123, 'text/plain',
'5af83e3196bf99f440f31f2e1a6c9afe') '5af83e3196bf99f440f31f2e1a6c9afe')
info = broker.get_info() info = broker.get_info()
self.assertEquals(info['object_count'], 2) self.assertEquals(info['object_count'], 2)
self.assertEquals(info['bytes_used'], 246) self.assertEquals(info['bytes_used'], 246)
sleep(.00001) sleep(.00001)
broker.put_object('o2', normalize_timestamp(time()), 1000, broker.put_object('o2', Timestamp(time()).internal, 1000,
'text/plain', '5af83e3196bf99f440f31f2e1a6c9afe') 'text/plain', '5af83e3196bf99f440f31f2e1a6c9afe')
info = broker.get_info() info = broker.get_info()
self.assertEquals(info['object_count'], 2) self.assertEquals(info['object_count'], 2)
self.assertEquals(info['bytes_used'], 1123) self.assertEquals(info['bytes_used'], 1123)
sleep(.00001) sleep(.00001)
broker.delete_object('o1', normalize_timestamp(time())) broker.delete_object('o1', Timestamp(time()).internal)
info = broker.get_info() info = broker.get_info()
self.assertEquals(info['object_count'], 1) self.assertEquals(info['object_count'], 1)
self.assertEquals(info['bytes_used'], 1000) self.assertEquals(info['bytes_used'], 1000)
sleep(.00001) sleep(.00001)
broker.delete_object('o2', normalize_timestamp(time())) broker.delete_object('o2', Timestamp(time()).internal)
info = broker.get_info() info = broker.get_info()
self.assertEquals(info['object_count'], 0) self.assertEquals(info['object_count'], 0)
self.assertEquals(info['bytes_used'], 0) self.assertEquals(info['bytes_used'], 0)
@ -617,7 +617,7 @@ class TestContainerBroker(unittest.TestCase):
def test_set_x_syncs(self): def test_set_x_syncs(self):
broker = ContainerBroker(':memory:', account='test1', broker = ContainerBroker(':memory:', account='test1',
container='test2') container='test2')
broker.initialize(normalize_timestamp('1'), 0) broker.initialize(Timestamp('1').internal, 0)
info = broker.get_info() info = broker.get_info()
self.assertEquals(info['x_container_sync_point1'], -1) self.assertEquals(info['x_container_sync_point1'], -1)
@ -631,7 +631,7 @@ class TestContainerBroker(unittest.TestCase):
def test_get_report_info(self): def test_get_report_info(self):
broker = ContainerBroker(':memory:', account='test1', broker = ContainerBroker(':memory:', account='test1',
container='test2') container='test2')
broker.initialize(normalize_timestamp('1'), 0) broker.initialize(Timestamp('1').internal, 0)
info = broker.get_info() info = broker.get_info()
self.assertEquals(info['account'], 'test1') self.assertEquals(info['account'], 'test1')
@ -641,7 +641,7 @@ class TestContainerBroker(unittest.TestCase):
self.assertEquals(info['reported_object_count'], 0) self.assertEquals(info['reported_object_count'], 0)
self.assertEquals(info['reported_bytes_used'], 0) self.assertEquals(info['reported_bytes_used'], 0)
broker.put_object('o1', normalize_timestamp(time()), 123, 'text/plain', broker.put_object('o1', Timestamp(time()).internal, 123, 'text/plain',
'5af83e3196bf99f440f31f2e1a6c9afe') '5af83e3196bf99f440f31f2e1a6c9afe')
info = broker.get_info() info = broker.get_info()
self.assertEquals(info['object_count'], 1) self.assertEquals(info['object_count'], 1)
@ -650,7 +650,7 @@ class TestContainerBroker(unittest.TestCase):
self.assertEquals(info['reported_bytes_used'], 0) self.assertEquals(info['reported_bytes_used'], 0)
sleep(.00001) sleep(.00001)
broker.put_object('o2', normalize_timestamp(time()), 123, 'text/plain', broker.put_object('o2', Timestamp(time()).internal, 123, 'text/plain',
'5af83e3196bf99f440f31f2e1a6c9afe') '5af83e3196bf99f440f31f2e1a6c9afe')
info = broker.get_info() info = broker.get_info()
self.assertEquals(info['object_count'], 2) self.assertEquals(info['object_count'], 2)
@ -659,7 +659,7 @@ class TestContainerBroker(unittest.TestCase):
self.assertEquals(info['reported_bytes_used'], 0) self.assertEquals(info['reported_bytes_used'], 0)
sleep(.00001) sleep(.00001)
broker.put_object('o2', normalize_timestamp(time()), 1000, broker.put_object('o2', Timestamp(time()).internal, 1000,
'text/plain', '5af83e3196bf99f440f31f2e1a6c9afe') 'text/plain', '5af83e3196bf99f440f31f2e1a6c9afe')
info = broker.get_info() info = broker.get_info()
self.assertEquals(info['object_count'], 2) self.assertEquals(info['object_count'], 2)
@ -667,9 +667,9 @@ class TestContainerBroker(unittest.TestCase):
self.assertEquals(info['reported_object_count'], 0) self.assertEquals(info['reported_object_count'], 0)
self.assertEquals(info['reported_bytes_used'], 0) self.assertEquals(info['reported_bytes_used'], 0)
put_timestamp = normalize_timestamp(time()) put_timestamp = Timestamp(time()).internal
sleep(.001) sleep(.001)
delete_timestamp = normalize_timestamp(time()) delete_timestamp = Timestamp(time()).internal
broker.reported(put_timestamp, delete_timestamp, 2, 1123) broker.reported(put_timestamp, delete_timestamp, 2, 1123)
info = broker.get_info() info = broker.get_info()
self.assertEquals(info['object_count'], 2) self.assertEquals(info['object_count'], 2)
@ -680,7 +680,7 @@ class TestContainerBroker(unittest.TestCase):
self.assertEquals(info['reported_bytes_used'], 1123) self.assertEquals(info['reported_bytes_used'], 1123)
sleep(.00001) sleep(.00001)
broker.delete_object('o1', normalize_timestamp(time())) broker.delete_object('o1', Timestamp(time()).internal)
info = broker.get_info() info = broker.get_info()
self.assertEquals(info['object_count'], 1) self.assertEquals(info['object_count'], 1)
self.assertEquals(info['bytes_used'], 1000) self.assertEquals(info['bytes_used'], 1000)
@ -688,7 +688,7 @@ class TestContainerBroker(unittest.TestCase):
self.assertEquals(info['reported_bytes_used'], 1123) self.assertEquals(info['reported_bytes_used'], 1123)
sleep(.00001) sleep(.00001)
broker.delete_object('o2', normalize_timestamp(time())) broker.delete_object('o2', Timestamp(time()).internal)
info = broker.get_info() info = broker.get_info()
self.assertEquals(info['object_count'], 0) self.assertEquals(info['object_count'], 0)
self.assertEquals(info['bytes_used'], 0) self.assertEquals(info['bytes_used'], 0)
@ -698,20 +698,20 @@ class TestContainerBroker(unittest.TestCase):
def test_list_objects_iter(self): def test_list_objects_iter(self):
# Test ContainerBroker.list_objects_iter # Test ContainerBroker.list_objects_iter
broker = ContainerBroker(':memory:', account='a', container='c') broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'), 0) broker.initialize(Timestamp('1').internal, 0)
for obj1 in xrange(4): for obj1 in xrange(4):
for obj2 in xrange(125): for obj2 in xrange(125):
broker.put_object('%d/%04d' % (obj1, obj2), broker.put_object('%d/%04d' % (obj1, obj2),
normalize_timestamp(time()), 0, 'text/plain', Timestamp(time()).internal, 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e') 'd41d8cd98f00b204e9800998ecf8427e')
for obj in xrange(125): for obj in xrange(125):
broker.put_object('2/0051/%04d' % obj, broker.put_object('2/0051/%04d' % obj,
normalize_timestamp(time()), 0, 'text/plain', Timestamp(time()).internal, 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e') 'd41d8cd98f00b204e9800998ecf8427e')
for obj in xrange(125): for obj in xrange(125):
broker.put_object('3/%04d/0049' % obj, broker.put_object('3/%04d/0049' % obj,
normalize_timestamp(time()), 0, 'text/plain', Timestamp(time()).internal, 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e') 'd41d8cd98f00b204e9800998ecf8427e')
listing = broker.list_objects_iter(100, '', None, None, '') listing = broker.list_objects_iter(100, '', None, None, '')
@ -777,7 +777,7 @@ class TestContainerBroker(unittest.TestCase):
'3/0047/', '3/0048', '3/0048/', '3/0049', '3/0047/', '3/0048', '3/0048/', '3/0049',
'3/0049/', '3/0050']) '3/0049/', '3/0050'])
broker.put_object('3/0049/', normalize_timestamp(time()), 0, broker.put_object('3/0049/', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
listing = broker.list_objects_iter(10, '3/0048', None, None, None) listing = broker.list_objects_iter(10, '3/0048', None, None, None)
self.assertEquals(len(listing), 10) self.assertEquals(len(listing), 10)
@ -817,20 +817,20 @@ class TestContainerBroker(unittest.TestCase):
# Test ContainerBroker.list_objects_iter using a # Test ContainerBroker.list_objects_iter using a
# delimiter that is not a slash # delimiter that is not a slash
broker = ContainerBroker(':memory:', account='a', container='c') broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'), 0) broker.initialize(Timestamp('1').internal, 0)
for obj1 in xrange(4): for obj1 in xrange(4):
for obj2 in xrange(125): for obj2 in xrange(125):
broker.put_object('%d:%04d' % (obj1, obj2), broker.put_object('%d:%04d' % (obj1, obj2),
normalize_timestamp(time()), 0, 'text/plain', Timestamp(time()).internal, 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e') 'd41d8cd98f00b204e9800998ecf8427e')
for obj in xrange(125): for obj in xrange(125):
broker.put_object('2:0051:%04d' % obj, broker.put_object('2:0051:%04d' % obj,
normalize_timestamp(time()), 0, 'text/plain', Timestamp(time()).internal, 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e') 'd41d8cd98f00b204e9800998ecf8427e')
for obj in xrange(125): for obj in xrange(125):
broker.put_object('3:%04d:0049' % obj, broker.put_object('3:%04d:0049' % obj,
normalize_timestamp(time()), 0, 'text/plain', Timestamp(time()).internal, 0, 'text/plain',
'd41d8cd98f00b204e9800998ecf8427e') 'd41d8cd98f00b204e9800998ecf8427e')
listing = broker.list_objects_iter(100, '', None, None, '') listing = broker.list_objects_iter(100, '', None, None, '')
@ -895,7 +895,7 @@ class TestContainerBroker(unittest.TestCase):
'3:0047:', '3:0048', '3:0048:', '3:0049', '3:0047:', '3:0048', '3:0048:', '3:0049',
'3:0049:', '3:0050']) '3:0049:', '3:0050'])
broker.put_object('3:0049:', normalize_timestamp(time()), 0, broker.put_object('3:0049:', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
listing = broker.list_objects_iter(10, '3:0048', None, None, None) listing = broker.list_objects_iter(10, '3:0048', None, None, None)
self.assertEquals(len(listing), 10) self.assertEquals(len(listing), 10)
@ -934,25 +934,25 @@ class TestContainerBroker(unittest.TestCase):
def test_list_objects_iter_prefix_delim(self): def test_list_objects_iter_prefix_delim(self):
# Test ContainerBroker.list_objects_iter # Test ContainerBroker.list_objects_iter
broker = ContainerBroker(':memory:', account='a', container='c') broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'), 0) broker.initialize(Timestamp('1').internal, 0)
broker.put_object( broker.put_object(
'/pets/dogs/1', normalize_timestamp(0), 0, '/pets/dogs/1', Timestamp(0).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object( broker.put_object(
'/pets/dogs/2', normalize_timestamp(0), 0, '/pets/dogs/2', Timestamp(0).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object( broker.put_object(
'/pets/fish/a', normalize_timestamp(0), 0, '/pets/fish/a', Timestamp(0).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object( broker.put_object(
'/pets/fish/b', normalize_timestamp(0), 0, '/pets/fish/b', Timestamp(0).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object( broker.put_object(
'/pets/fish_info.txt', normalize_timestamp(0), 0, '/pets/fish_info.txt', Timestamp(0).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object( broker.put_object(
'/snakes', normalize_timestamp(0), 0, '/snakes', Timestamp(0).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
#def list_objects_iter(self, limit, marker, prefix, delimiter, #def list_objects_iter(self, limit, marker, prefix, delimiter,
@ -971,50 +971,50 @@ class TestContainerBroker(unittest.TestCase):
# Test ContainerBroker.list_objects_iter for a # Test ContainerBroker.list_objects_iter for a
# container that has an odd file with a trailing delimiter # container that has an odd file with a trailing delimiter
broker = ContainerBroker(':memory:', account='a', container='c') broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'), 0) broker.initialize(Timestamp('1').internal, 0)
broker.put_object('a', normalize_timestamp(time()), 0, broker.put_object('a', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('a/', normalize_timestamp(time()), 0, broker.put_object('a/', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('a/a', normalize_timestamp(time()), 0, broker.put_object('a/a', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('a/a/a', normalize_timestamp(time()), 0, broker.put_object('a/a/a', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('a/a/b', normalize_timestamp(time()), 0, broker.put_object('a/a/b', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('a/b', normalize_timestamp(time()), 0, broker.put_object('a/b', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('b', normalize_timestamp(time()), 0, broker.put_object('b', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('b/a', normalize_timestamp(time()), 0, broker.put_object('b/a', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('b/b', normalize_timestamp(time()), 0, broker.put_object('b/b', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('c', normalize_timestamp(time()), 0, broker.put_object('c', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('a/0', normalize_timestamp(time()), 0, broker.put_object('a/0', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('0', normalize_timestamp(time()), 0, broker.put_object('0', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('0/', normalize_timestamp(time()), 0, broker.put_object('0/', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('00', normalize_timestamp(time()), 0, broker.put_object('00', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('0/0', normalize_timestamp(time()), 0, broker.put_object('0/0', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('0/00', normalize_timestamp(time()), 0, broker.put_object('0/00', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('0/1', normalize_timestamp(time()), 0, broker.put_object('0/1', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('0/1/', normalize_timestamp(time()), 0, broker.put_object('0/1/', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('0/1/0', normalize_timestamp(time()), 0, broker.put_object('0/1/0', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('1', normalize_timestamp(time()), 0, broker.put_object('1', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('1/', normalize_timestamp(time()), 0, broker.put_object('1/', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('1/0', normalize_timestamp(time()), 0, broker.put_object('1/0', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
listing = broker.list_objects_iter(25, None, None, None, None) listing = broker.list_objects_iter(25, None, None, None, None)
self.assertEquals(len(listing), 22) self.assertEquals(len(listing), 22)
@ -1051,50 +1051,50 @@ class TestContainerBroker(unittest.TestCase):
# Test ContainerBroker.list_objects_iter for a # Test ContainerBroker.list_objects_iter for a
# container that has an odd file with a trailing delimiter # container that has an odd file with a trailing delimiter
broker = ContainerBroker(':memory:', account='a', container='c') broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'), 0) broker.initialize(Timestamp('1').internal, 0)
broker.put_object('a', normalize_timestamp(time()), 0, broker.put_object('a', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('a:', normalize_timestamp(time()), 0, broker.put_object('a:', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('a:a', normalize_timestamp(time()), 0, broker.put_object('a:a', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('a:a:a', normalize_timestamp(time()), 0, broker.put_object('a:a:a', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('a:a:b', normalize_timestamp(time()), 0, broker.put_object('a:a:b', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('a:b', normalize_timestamp(time()), 0, broker.put_object('a:b', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('b', normalize_timestamp(time()), 0, broker.put_object('b', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('b:a', normalize_timestamp(time()), 0, broker.put_object('b:a', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('b:b', normalize_timestamp(time()), 0, broker.put_object('b:b', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('c', normalize_timestamp(time()), 0, broker.put_object('c', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('a:0', normalize_timestamp(time()), 0, broker.put_object('a:0', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('0', normalize_timestamp(time()), 0, broker.put_object('0', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('0:', normalize_timestamp(time()), 0, broker.put_object('0:', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('00', normalize_timestamp(time()), 0, broker.put_object('00', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('0:0', normalize_timestamp(time()), 0, broker.put_object('0:0', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('0:00', normalize_timestamp(time()), 0, broker.put_object('0:00', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('0:1', normalize_timestamp(time()), 0, broker.put_object('0:1', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('0:1:', normalize_timestamp(time()), 0, broker.put_object('0:1:', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('0:1:0', normalize_timestamp(time()), 0, broker.put_object('0:1:0', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('1', normalize_timestamp(time()), 0, broker.put_object('1', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('1:', normalize_timestamp(time()), 0, broker.put_object('1:', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('1:0', normalize_timestamp(time()), 0, broker.put_object('1:0', Timestamp(time()).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
listing = broker.list_objects_iter(25, None, None, None, None) listing = broker.list_objects_iter(25, None, None, None, None)
self.assertEquals(len(listing), 22) self.assertEquals(len(listing), 22)
@ -1129,19 +1129,19 @@ class TestContainerBroker(unittest.TestCase):
def test_chexor(self): def test_chexor(self):
broker = ContainerBroker(':memory:', account='a', container='c') broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'), 0) broker.initialize(Timestamp('1').internal, 0)
broker.put_object('a', normalize_timestamp(1), 0, broker.put_object('a', Timestamp(1).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker.put_object('b', normalize_timestamp(2), 0, broker.put_object('b', Timestamp(2).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
hasha = hashlib.md5('%s-%s' % ('a', '0000000001.00000')).digest() hasha = hashlib.md5('%s-%s' % ('a', Timestamp(1).internal)).digest()
hashb = hashlib.md5('%s-%s' % ('b', '0000000002.00000')).digest() hashb = hashlib.md5('%s-%s' % ('b', Timestamp(2).internal)).digest()
hashc = ''.join( hashc = ''.join(
('%2x' % (ord(a) ^ ord(b)) for a, b in zip(hasha, hashb))) ('%02x' % (ord(a) ^ ord(b)) for a, b in zip(hasha, hashb)))
self.assertEquals(broker.get_info()['hash'], hashc) self.assertEquals(broker.get_info()['hash'], hashc)
broker.put_object('b', normalize_timestamp(3), 0, broker.put_object('b', Timestamp(3).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
hashb = hashlib.md5('%s-%s' % ('b', '0000000003.00000')).digest() hashb = hashlib.md5('%s-%s' % ('b', Timestamp(3).internal)).digest()
hashc = ''.join( hashc = ''.join(
('%02x' % (ord(a) ^ ord(b)) for a, b in zip(hasha, hashb))) ('%02x' % (ord(a) ^ ord(b)) for a, b in zip(hasha, hashb)))
self.assertEquals(broker.get_info()['hash'], hashc) self.assertEquals(broker.get_info()['hash'], hashc)
@ -1149,7 +1149,7 @@ class TestContainerBroker(unittest.TestCase):
def test_newid(self): def test_newid(self):
# test DatabaseBroker.newid # test DatabaseBroker.newid
broker = ContainerBroker(':memory:', account='a', container='c') broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'), 0) broker.initialize(Timestamp('1').internal, 0)
id = broker.get_info()['id'] id = broker.get_info()['id']
broker.newid('someid') broker.newid('someid')
self.assertNotEquals(id, broker.get_info()['id']) self.assertNotEquals(id, broker.get_info()['id'])
@ -1157,11 +1157,11 @@ class TestContainerBroker(unittest.TestCase):
def test_get_items_since(self): def test_get_items_since(self):
# test DatabaseBroker.get_items_since # test DatabaseBroker.get_items_since
broker = ContainerBroker(':memory:', account='a', container='c') broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'), 0) broker.initialize(Timestamp('1').internal, 0)
broker.put_object('a', normalize_timestamp(1), 0, broker.put_object('a', Timestamp(1).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
max_row = broker.get_replication_info()['max_row'] max_row = broker.get_replication_info()['max_row']
broker.put_object('b', normalize_timestamp(2), 0, broker.put_object('b', Timestamp(2).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
items = broker.get_items_since(max_row, 1000) items = broker.get_items_since(max_row, 1000)
self.assertEquals(len(items), 1) self.assertEquals(len(items), 1)
@ -1170,9 +1170,9 @@ class TestContainerBroker(unittest.TestCase):
def test_sync_merging(self): def test_sync_merging(self):
# exercise the DatabaseBroker sync functions a bit # exercise the DatabaseBroker sync functions a bit
broker1 = ContainerBroker(':memory:', account='a', container='c') broker1 = ContainerBroker(':memory:', account='a', container='c')
broker1.initialize(normalize_timestamp('1'), 0) broker1.initialize(Timestamp('1').internal, 0)
broker2 = ContainerBroker(':memory:', account='a', container='c') broker2 = ContainerBroker(':memory:', account='a', container='c')
broker2.initialize(normalize_timestamp('1'), 0) broker2.initialize(Timestamp('1').internal, 0)
self.assertEquals(broker2.get_sync('12345'), -1) self.assertEquals(broker2.get_sync('12345'), -1)
broker1.merge_syncs([{'sync_point': 3, 'remote_id': '12345'}]) broker1.merge_syncs([{'sync_point': 3, 'remote_id': '12345'}])
broker2.merge_syncs(broker1.get_syncs()) broker2.merge_syncs(broker1.get_syncs())
@ -1180,12 +1180,12 @@ class TestContainerBroker(unittest.TestCase):
def test_merge_items(self): def test_merge_items(self):
broker1 = ContainerBroker(':memory:', account='a', container='c') broker1 = ContainerBroker(':memory:', account='a', container='c')
broker1.initialize(normalize_timestamp('1'), 0) broker1.initialize(Timestamp('1').internal, 0)
broker2 = ContainerBroker(':memory:', account='a', container='c') broker2 = ContainerBroker(':memory:', account='a', container='c')
broker2.initialize(normalize_timestamp('1'), 0) broker2.initialize(Timestamp('1').internal, 0)
broker1.put_object('a', normalize_timestamp(1), 0, broker1.put_object('a', Timestamp(1).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker1.put_object('b', normalize_timestamp(2), 0, broker1.put_object('b', Timestamp(2).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
id = broker1.get_info()['id'] id = broker1.get_info()['id']
broker2.merge_items(broker1.get_items_since( broker2.merge_items(broker1.get_items_since(
@ -1193,7 +1193,7 @@ class TestContainerBroker(unittest.TestCase):
items = broker2.get_items_since(-1, 1000) items = broker2.get_items_since(-1, 1000)
self.assertEquals(len(items), 2) self.assertEquals(len(items), 2)
self.assertEquals(['a', 'b'], sorted([rec['name'] for rec in items])) self.assertEquals(['a', 'b'], sorted([rec['name'] for rec in items]))
broker1.put_object('c', normalize_timestamp(3), 0, broker1.put_object('c', Timestamp(3).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker2.merge_items(broker1.get_items_since( broker2.merge_items(broker1.get_items_since(
broker2.get_sync(id), 1000), id) broker2.get_sync(id), 1000), id)
@ -1205,17 +1205,17 @@ class TestContainerBroker(unittest.TestCase):
def test_merge_items_overwrite(self): def test_merge_items_overwrite(self):
# test DatabaseBroker.merge_items # test DatabaseBroker.merge_items
broker1 = ContainerBroker(':memory:', account='a', container='c') broker1 = ContainerBroker(':memory:', account='a', container='c')
broker1.initialize(normalize_timestamp('1'), 0) broker1.initialize(Timestamp('1').internal, 0)
id = broker1.get_info()['id'] id = broker1.get_info()['id']
broker2 = ContainerBroker(':memory:', account='a', container='c') broker2 = ContainerBroker(':memory:', account='a', container='c')
broker2.initialize(normalize_timestamp('1'), 0) broker2.initialize(Timestamp('1').internal, 0)
broker1.put_object('a', normalize_timestamp(2), 0, broker1.put_object('a', Timestamp(2).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker1.put_object('b', normalize_timestamp(3), 0, broker1.put_object('b', Timestamp(3).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker2.merge_items(broker1.get_items_since( broker2.merge_items(broker1.get_items_since(
broker2.get_sync(id), 1000), id) broker2.get_sync(id), 1000), id)
broker1.put_object('a', normalize_timestamp(4), 0, broker1.put_object('a', Timestamp(4).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker2.merge_items(broker1.get_items_since( broker2.merge_items(broker1.get_items_since(
broker2.get_sync(id), 1000), id) broker2.get_sync(id), 1000), id)
@ -1223,24 +1223,24 @@ class TestContainerBroker(unittest.TestCase):
self.assertEquals(['a', 'b'], sorted([rec['name'] for rec in items])) self.assertEquals(['a', 'b'], sorted([rec['name'] for rec in items]))
for rec in items: for rec in items:
if rec['name'] == 'a': if rec['name'] == 'a':
self.assertEquals(rec['created_at'], normalize_timestamp(4)) self.assertEquals(rec['created_at'], Timestamp(4).internal)
if rec['name'] == 'b': if rec['name'] == 'b':
self.assertEquals(rec['created_at'], normalize_timestamp(3)) self.assertEquals(rec['created_at'], Timestamp(3).internal)
def test_merge_items_post_overwrite_out_of_order(self): def test_merge_items_post_overwrite_out_of_order(self):
# test DatabaseBroker.merge_items # test DatabaseBroker.merge_items
broker1 = ContainerBroker(':memory:', account='a', container='c') broker1 = ContainerBroker(':memory:', account='a', container='c')
broker1.initialize(normalize_timestamp('1'), 0) broker1.initialize(Timestamp('1').internal, 0)
id = broker1.get_info()['id'] id = broker1.get_info()['id']
broker2 = ContainerBroker(':memory:', account='a', container='c') broker2 = ContainerBroker(':memory:', account='a', container='c')
broker2.initialize(normalize_timestamp('1'), 0) broker2.initialize(Timestamp('1').internal, 0)
broker1.put_object('a', normalize_timestamp(2), 0, broker1.put_object('a', Timestamp(2).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker1.put_object('b', normalize_timestamp(3), 0, broker1.put_object('b', Timestamp(3).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker2.merge_items(broker1.get_items_since( broker2.merge_items(broker1.get_items_since(
broker2.get_sync(id), 1000), id) broker2.get_sync(id), 1000), id)
broker1.put_object('a', normalize_timestamp(4), 0, broker1.put_object('a', Timestamp(4).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker2.merge_items(broker1.get_items_since( broker2.merge_items(broker1.get_items_since(
broker2.get_sync(id), 1000), id) broker2.get_sync(id), 1000), id)
@ -1248,18 +1248,18 @@ class TestContainerBroker(unittest.TestCase):
self.assertEquals(['a', 'b'], sorted([rec['name'] for rec in items])) self.assertEquals(['a', 'b'], sorted([rec['name'] for rec in items]))
for rec in items: for rec in items:
if rec['name'] == 'a': if rec['name'] == 'a':
self.assertEquals(rec['created_at'], normalize_timestamp(4)) self.assertEquals(rec['created_at'], Timestamp(4).internal)
if rec['name'] == 'b': if rec['name'] == 'b':
self.assertEquals(rec['created_at'], normalize_timestamp(3)) self.assertEquals(rec['created_at'], Timestamp(3).internal)
self.assertEquals(rec['content_type'], 'text/plain') self.assertEquals(rec['content_type'], 'text/plain')
items = broker2.get_items_since(-1, 1000) items = broker2.get_items_since(-1, 1000)
self.assertEquals(['a', 'b'], sorted([rec['name'] for rec in items])) self.assertEquals(['a', 'b'], sorted([rec['name'] for rec in items]))
for rec in items: for rec in items:
if rec['name'] == 'a': if rec['name'] == 'a':
self.assertEquals(rec['created_at'], normalize_timestamp(4)) self.assertEquals(rec['created_at'], Timestamp(4).internal)
if rec['name'] == 'b': if rec['name'] == 'b':
self.assertEquals(rec['created_at'], normalize_timestamp(3)) self.assertEquals(rec['created_at'], Timestamp(3).internal)
broker1.put_object('b', normalize_timestamp(5), 0, broker1.put_object('b', Timestamp(5).internal, 0,
'text/plain', 'd41d8cd98f00b204e9800998ecf8427e') 'text/plain', 'd41d8cd98f00b204e9800998ecf8427e')
broker2.merge_items(broker1.get_items_since( broker2.merge_items(broker1.get_items_since(
broker2.get_sync(id), 1000), id) broker2.get_sync(id), 1000), id)
@ -1267,13 +1267,13 @@ class TestContainerBroker(unittest.TestCase):
self.assertEquals(['a', 'b'], sorted([rec['name'] for rec in items])) self.assertEquals(['a', 'b'], sorted([rec['name'] for rec in items]))
for rec in items: for rec in items:
if rec['name'] == 'a': if rec['name'] == 'a':
self.assertEquals(rec['created_at'], normalize_timestamp(4)) self.assertEquals(rec['created_at'], Timestamp(4).internal)
if rec['name'] == 'b': if rec['name'] == 'b':
self.assertEquals(rec['created_at'], normalize_timestamp(5)) self.assertEquals(rec['created_at'], Timestamp(5).internal)
self.assertEquals(rec['content_type'], 'text/plain') self.assertEquals(rec['content_type'], 'text/plain')
def test_set_storage_policy_index(self): def test_set_storage_policy_index(self):
ts = (normalize_timestamp(t) for t in ts = (Timestamp(t).internal for t in
itertools.count(int(time()))) itertools.count(int(time())))
broker = ContainerBroker(':memory:', account='test_account', broker = ContainerBroker(':memory:', account='test_account',
container='test_container') container='test_container')
@ -1329,7 +1329,7 @@ class TestContainerBroker(unittest.TestCase):
# never-had-an-object container to make sure we handle it # never-had-an-object container to make sure we handle it
broker = ContainerBroker(':memory:', account='test_account', broker = ContainerBroker(':memory:', account='test_account',
container='test_container') container='test_container')
broker.initialize(normalize_timestamp('1'), 0) broker.initialize(Timestamp('1').internal, 0)
info = broker.get_info() info = broker.get_info()
self.assertEqual(0, info['storage_policy_index']) self.assertEqual(0, info['storage_policy_index'])
@ -1340,14 +1340,14 @@ class TestContainerBroker(unittest.TestCase):
def test_reconciler_sync(self): def test_reconciler_sync(self):
broker = ContainerBroker(':memory:', account='test_account', broker = ContainerBroker(':memory:', account='test_account',
container='test_container') container='test_container')
broker.initialize(normalize_timestamp('1'), 0) broker.initialize(Timestamp('1').internal, 0)
self.assertEquals(-1, broker.get_reconciler_sync()) self.assertEquals(-1, broker.get_reconciler_sync())
broker.update_reconciler_sync(10) broker.update_reconciler_sync(10)
self.assertEquals(10, broker.get_reconciler_sync()) self.assertEquals(10, broker.get_reconciler_sync())
@with_tempdir @with_tempdir
def test_legacy_pending_files(self, tempdir): def test_legacy_pending_files(self, tempdir):
ts = (normalize_timestamp(t) for t in ts = (Timestamp(t).internal for t in
itertools.count(int(time()))) itertools.count(int(time())))
db_path = os.path.join(tempdir, 'container.db') db_path = os.path.join(tempdir, 'container.db')
@ -1458,7 +1458,7 @@ def premetadata_create_container_info_table(self, conn, put_timestamp,
:param put_timestamp: put timestamp :param put_timestamp: put timestamp
""" """
if put_timestamp is None: if put_timestamp is None:
put_timestamp = normalize_timestamp(0) put_timestamp = Timestamp(0).internal
conn.executescript(''' conn.executescript('''
CREATE TABLE container_stat ( CREATE TABLE container_stat (
account TEXT, account TEXT,
@ -1485,7 +1485,7 @@ def premetadata_create_container_info_table(self, conn, put_timestamp,
UPDATE container_stat UPDATE container_stat
SET account = ?, container = ?, created_at = ?, id = ?, SET account = ?, container = ?, created_at = ?, id = ?,
put_timestamp = ? put_timestamp = ?
''', (self.account, self.container, normalize_timestamp(time()), ''', (self.account, self.container, Timestamp(time()).internal,
str(uuid4()), put_timestamp)) str(uuid4()), put_timestamp))
@ -1499,7 +1499,7 @@ class TestContainerBrokerBeforeMetadata(ContainerBrokerMigrationMixin,
def setUp(self): def setUp(self):
super(TestContainerBrokerBeforeMetadata, self).setUp() super(TestContainerBrokerBeforeMetadata, self).setUp()
broker = ContainerBroker(':memory:', account='a', container='c') broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'), 0) broker.initialize(Timestamp('1').internal, 0)
exc = None exc = None
with broker.get() as conn: with broker.get() as conn:
try: try:
@ -1511,7 +1511,7 @@ class TestContainerBrokerBeforeMetadata(ContainerBrokerMigrationMixin,
def tearDown(self): def tearDown(self):
super(TestContainerBrokerBeforeMetadata, self).tearDown() super(TestContainerBrokerBeforeMetadata, self).tearDown()
broker = ContainerBroker(':memory:', account='a', container='c') broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'), 0) broker.initialize(Timestamp('1').internal, 0)
with broker.get() as conn: with broker.get() as conn:
conn.execute('SELECT metadata FROM container_stat') conn.execute('SELECT metadata FROM container_stat')
@ -1529,7 +1529,7 @@ def prexsync_create_container_info_table(self, conn, put_timestamp,
:param put_timestamp: put timestamp :param put_timestamp: put timestamp
""" """
if put_timestamp is None: if put_timestamp is None:
put_timestamp = normalize_timestamp(0) put_timestamp = Timestamp(0).internal
conn.executescript(""" conn.executescript("""
CREATE TABLE container_stat ( CREATE TABLE container_stat (
account TEXT, account TEXT,
@ -1557,7 +1557,7 @@ def prexsync_create_container_info_table(self, conn, put_timestamp,
UPDATE container_stat UPDATE container_stat
SET account = ?, container = ?, created_at = ?, id = ?, SET account = ?, container = ?, created_at = ?, id = ?,
put_timestamp = ? put_timestamp = ?
''', (self.account, self.container, normalize_timestamp(time()), ''', (self.account, self.container, Timestamp(time()).internal,
str(uuid4()), put_timestamp)) str(uuid4()), put_timestamp))
@ -1573,7 +1573,7 @@ class TestContainerBrokerBeforeXSync(ContainerBrokerMigrationMixin,
ContainerBroker.create_container_info_table = \ ContainerBroker.create_container_info_table = \
prexsync_create_container_info_table prexsync_create_container_info_table
broker = ContainerBroker(':memory:', account='a', container='c') broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'), 0) broker.initialize(Timestamp('1').internal, 0)
exc = None exc = None
with broker.get() as conn: with broker.get() as conn:
try: try:
@ -1586,7 +1586,7 @@ class TestContainerBrokerBeforeXSync(ContainerBrokerMigrationMixin,
def tearDown(self): def tearDown(self):
super(TestContainerBrokerBeforeXSync, self).tearDown() super(TestContainerBrokerBeforeXSync, self).tearDown()
broker = ContainerBroker(':memory:', account='a', container='c') broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'), 0) broker.initialize(Timestamp('1').internal, 0)
with broker.get() as conn: with broker.get() as conn:
conn.execute('SELECT x_container_sync_point1 FROM container_stat') conn.execute('SELECT x_container_sync_point1 FROM container_stat')
@ -1641,7 +1641,7 @@ def prespi_create_container_info_table(self, conn, put_timestamp,
:param put_timestamp: put timestamp :param put_timestamp: put timestamp
""" """
if put_timestamp is None: if put_timestamp is None:
put_timestamp = normalize_timestamp(0) put_timestamp = Timestamp(0).internal
conn.executescript(""" conn.executescript("""
CREATE TABLE container_stat ( CREATE TABLE container_stat (
account TEXT, account TEXT,
@ -1671,7 +1671,7 @@ def prespi_create_container_info_table(self, conn, put_timestamp,
UPDATE container_stat UPDATE container_stat
SET account = ?, container = ?, created_at = ?, id = ?, SET account = ?, container = ?, created_at = ?, id = ?,
put_timestamp = ? put_timestamp = ?
''', (self.account, self.container, normalize_timestamp(time()), ''', (self.account, self.container, Timestamp(time()).internal,
str(uuid4()), put_timestamp)) str(uuid4()), put_timestamp))
@ -1688,7 +1688,7 @@ class TestContainerBrokerBeforeSPI(ContainerBrokerMigrationMixin,
prespi_create_container_info_table prespi_create_container_info_table
broker = ContainerBroker(':memory:', account='a', container='c') broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'), 0) broker.initialize(Timestamp('1').internal, 0)
exc = None exc = None
with broker.get() as conn: with broker.get() as conn:
try: try:
@ -1701,7 +1701,7 @@ class TestContainerBrokerBeforeSPI(ContainerBrokerMigrationMixin,
def tearDown(self): def tearDown(self):
super(TestContainerBrokerBeforeSPI, self).tearDown() super(TestContainerBrokerBeforeSPI, self).tearDown()
broker = ContainerBroker(':memory:', account='a', container='c') broker = ContainerBroker(':memory:', account='a', container='c')
broker.initialize(normalize_timestamp('1'), 0) broker.initialize(Timestamp('1').internal, 0)
with broker.get() as conn: with broker.get() as conn:
conn.execute('SELECT storage_policy_index FROM container_stat') conn.execute('SELECT storage_policy_index FROM container_stat')
@ -1712,7 +1712,7 @@ class TestContainerBrokerBeforeSPI(ContainerBrokerMigrationMixin,
# initialize an un-migrated database # initialize an un-migrated database
broker = ContainerBroker(db_path, account='a', container='c') broker = ContainerBroker(db_path, account='a', container='c')
put_timestamp = normalize_timestamp(int(time())) put_timestamp = Timestamp(int(time())).internal
broker.initialize(put_timestamp, None) broker.initialize(put_timestamp, None)
with broker.get() as conn: with broker.get() as conn:
try: try:
@ -1729,7 +1729,7 @@ class TestContainerBrokerBeforeSPI(ContainerBrokerMigrationMixin,
'from object table!') 'from object table!')
# manually insert an existing row to avoid automatic migration # manually insert an existing row to avoid automatic migration
obj_put_timestamp = normalize_timestamp(time()) obj_put_timestamp = Timestamp(time()).internal
with broker.get() as conn: with broker.get() as conn:
conn.execute(''' conn.execute('''
INSERT INTO object (name, created_at, size, INSERT INTO object (name, created_at, size,
@ -1767,7 +1767,7 @@ class TestContainerBrokerBeforeSPI(ContainerBrokerMigrationMixin,
self.assertEqual(info[k], v, self.assertEqual(info[k], v,
'The value for %s was %r not %r' % ( 'The value for %s was %r not %r' % (
k, info[k], v)) k, info[k], v))
self.assert_(float(info['created_at']) > float(put_timestamp)) self.assert_(Timestamp(info['created_at']) > Timestamp(put_timestamp))
self.assertNotEqual(int(info['hash'], 16), 0) self.assertNotEqual(int(info['hash'], 16), 0)
orig_hash = info['hash'] orig_hash = info['hash']
# get_replication_info # get_replication_info
@ -1776,7 +1776,7 @@ class TestContainerBrokerBeforeSPI(ContainerBrokerMigrationMixin,
expected['count'] = expected.pop('object_count') expected['count'] = expected.pop('object_count')
for k, v in expected.items(): for k, v in expected.items():
self.assertEqual(info[k], v) self.assertEqual(info[k], v)
self.assert_(float(info['created_at']) > float(put_timestamp)) self.assert_(Timestamp(info['created_at']) > Timestamp(put_timestamp))
self.assertEqual(info['hash'], orig_hash) self.assertEqual(info['hash'], orig_hash)
self.assertEqual(info['max_row'], 1) self.assertEqual(info['max_row'], 1)
self.assertEqual(info['metadata'], '') self.assertEqual(info['metadata'], '')
@ -1839,7 +1839,7 @@ class TestContainerBrokerBeforeSPI(ContainerBrokerMigrationMixin,
# now do a PUT with a different value for storage_policy_index # now do a PUT with a different value for storage_policy_index
# which will update the DB schema as well as update policy_stats # which will update the DB schema as well as update policy_stats
# for legacy objects in the DB (those without an SPI) # for legacy objects in the DB (those without an SPI)
second_object_put_timestamp = normalize_timestamp(time()) second_object_put_timestamp = Timestamp(time()).internal
other_policy = [p for p in POLICIES if p.idx != 0][0] other_policy = [p for p in POLICIES if p.idx != 0][0]
broker.put_object('test_second', second_object_put_timestamp, broker.put_object('test_second', second_object_put_timestamp,
456, 'text/plain', 456, 'text/plain',

View File

@ -30,14 +30,15 @@ from swift.container import reconciler
from swift.container.server import gen_resp_headers from swift.container.server import gen_resp_headers
from swift.common.direct_client import ClientException from swift.common.direct_client import ClientException
from swift.common import swob from swift.common import swob
from swift.common.utils import split_path, normalize_timestamp from swift.common.utils import split_path, Timestamp
from test.unit import debug_logger, FakeRing, fake_http_connect from test.unit import debug_logger, FakeRing, fake_http_connect
from test.unit.common.middleware.helpers import FakeSwift from test.unit.common.middleware.helpers import FakeSwift
def timestamp_to_last_modified(timestamp): def timestamp_to_last_modified(timestamp):
return datetime.fromtimestamp(timestamp).strftime('%Y-%m-%dT%H:%M:%S.%f') return datetime.fromtimestamp(
float(Timestamp(timestamp))).strftime('%Y-%m-%dT%H:%M:%S.%f')
def container_resp_headers(**kwargs): def container_resp_headers(**kwargs):
@ -122,15 +123,16 @@ class FakeInternalClient(reconciler.InternalClient):
# empty container # empty container
continue continue
obj_path = container_path + '/' + obj_name obj_path = container_path + '/' + obj_name
headers = {'X-Timestamp': normalize_timestamp(timestamp)} ts = Timestamp(timestamp)
headers = {'X-Timestamp': ts.normal,
'X-Backend-Timestamp': ts.internal}
# register object response # register object response
self.app.storage_policy[storage_policy_index].register( self.app.storage_policy[storage_policy_index].register(
'GET', obj_path, swob.HTTPOk, headers) 'GET', obj_path, swob.HTTPOk, headers)
self.app.storage_policy[storage_policy_index].register( self.app.storage_policy[storage_policy_index].register(
'DELETE', obj_path, swob.HTTPNoContent, {}) 'DELETE', obj_path, swob.HTTPNoContent, {})
# container listing entry # container listing entry
last_modified = timestamp_to_last_modified( last_modified = timestamp_to_last_modified(timestamp)
float(timestamp))
obj_data = { obj_data = {
'bytes': 0, 'bytes': 0,
# listing data is unicode # listing data is unicode
@ -183,7 +185,7 @@ class TestReconcilerUtils(unittest.TestCase):
def test_parse_raw_obj(self): def test_parse_raw_obj(self):
got = reconciler.parse_raw_obj({ got = reconciler.parse_raw_obj({
'name': "2:/AUTH_bob/con/obj", 'name': "2:/AUTH_bob/con/obj",
'hash': normalize_timestamp(2017551.49350), 'hash': Timestamp(2017551.49350).internal,
'last_modified': timestamp_to_last_modified(2017551.49352), 'last_modified': timestamp_to_last_modified(2017551.49352),
'content_type': 'application/x-delete', 'content_type': 'application/x-delete',
}) })
@ -197,7 +199,7 @@ class TestReconcilerUtils(unittest.TestCase):
got = reconciler.parse_raw_obj({ got = reconciler.parse_raw_obj({
'name': "1:/AUTH_bob/con/obj", 'name': "1:/AUTH_bob/con/obj",
'hash': normalize_timestamp(1234.20190), 'hash': Timestamp(1234.20190).internal,
'last_modified': timestamp_to_last_modified(1234.20192), 'last_modified': timestamp_to_last_modified(1234.20192),
'content_type': 'application/x-put', 'content_type': 'application/x-put',
}) })
@ -212,7 +214,7 @@ class TestReconcilerUtils(unittest.TestCase):
# negative test # negative test
obj_info = { obj_info = {
'name': "1:/AUTH_bob/con/obj", 'name': "1:/AUTH_bob/con/obj",
'hash': normalize_timestamp(1234.20190), 'hash': Timestamp(1234.20190).internal,
'last_modified': timestamp_to_last_modified(1234.20192), 'last_modified': timestamp_to_last_modified(1234.20192),
} }
self.assertRaises(ValueError, reconciler.parse_raw_obj, obj_info) self.assertRaises(ValueError, reconciler.parse_raw_obj, obj_info)
@ -235,15 +237,15 @@ class TestReconcilerUtils(unittest.TestCase):
mock_path = 'swift.container.reconciler.direct_head_container' mock_path = 'swift.container.reconciler.direct_head_container'
stub_resp_headers = [ stub_resp_headers = [
container_resp_headers( container_resp_headers(
status_changed_at=normalize_timestamp(ts.next()), status_changed_at=Timestamp(ts.next()).internal,
storage_policy_index=0, storage_policy_index=0,
), ),
container_resp_headers( container_resp_headers(
status_changed_at=normalize_timestamp(ts.next()), status_changed_at=Timestamp(ts.next()).internal,
storage_policy_index=1, storage_policy_index=1,
), ),
container_resp_headers( container_resp_headers(
status_changed_at=normalize_timestamp(ts.next()), status_changed_at=Timestamp(ts.next()).internal,
storage_policy_index=0, storage_policy_index=0,
), ),
] ]
@ -278,7 +280,7 @@ class TestReconcilerUtils(unittest.TestCase):
'Container Server blew up', 'Container Server blew up',
http_status=500, http_reason='Server Error', http_status=500, http_reason='Server Error',
http_headers=container_resp_headers( http_headers=container_resp_headers(
status_changed_at=normalize_timestamp(0), status_changed_at=Timestamp(0).internal,
storage_policy_index=0, storage_policy_index=0,
), ),
), ),
@ -295,11 +297,11 @@ class TestReconcilerUtils(unittest.TestCase):
mock_path = 'swift.container.reconciler.direct_head_container' mock_path = 'swift.container.reconciler.direct_head_container'
stub_resp_headers = [ stub_resp_headers = [
container_resp_headers( container_resp_headers(
status_changed_at=normalize_timestamp(ts.next()), status_changed_at=Timestamp(ts.next()).internal,
storage_policy_index=1, storage_policy_index=1,
), ),
container_resp_headers( container_resp_headers(
status_changed_at=normalize_timestamp(ts.next()), status_changed_at=Timestamp(ts.next()).internal,
storage_policy_index=0, storage_policy_index=0,
), ),
socket.error(errno.ECONNREFUSED, os.strerror(errno.ECONNREFUSED)), socket.error(errno.ECONNREFUSED, os.strerror(errno.ECONNREFUSED)),
@ -316,7 +318,7 @@ class TestReconcilerUtils(unittest.TestCase):
mock_path = 'swift.container.reconciler.direct_head_container' mock_path = 'swift.container.reconciler.direct_head_container'
stub_resp_headers = [ stub_resp_headers = [
container_resp_headers( container_resp_headers(
status_changed_at=normalize_timestamp(ts.next()), status_changed_at=Timestamp(ts.next()).internal,
storage_policy_index=0, storage_policy_index=0,
), ),
socket.error(errno.ECONNREFUSED, os.strerror(errno.ECONNREFUSED)), socket.error(errno.ECONNREFUSED, os.strerror(errno.ECONNREFUSED)),
@ -324,7 +326,7 @@ class TestReconcilerUtils(unittest.TestCase):
'Container Server blew up', 'Container Server blew up',
http_status=500, http_reason='Server Error', http_status=500, http_reason='Server Error',
http_headers=container_resp_headers( http_headers=container_resp_headers(
status_changed_at=normalize_timestamp(ts.next()), status_changed_at=Timestamp(ts.next()).internal,
storage_policy_index=1, storage_policy_index=1,
), ),
), ),
@ -339,7 +341,7 @@ class TestReconcilerUtils(unittest.TestCase):
def test_get_container_policy_index_for_deleted(self): def test_get_container_policy_index_for_deleted(self):
mock_path = 'swift.container.reconciler.direct_head_container' mock_path = 'swift.container.reconciler.direct_head_container'
headers = container_resp_headers( headers = container_resp_headers(
status_changed_at=normalize_timestamp(time.time()), status_changed_at=Timestamp(time.time()).internal,
storage_policy_index=1, storage_policy_index=1,
) )
stub_resp_headers = [ stub_resp_headers = [
@ -484,15 +486,15 @@ class TestReconcilerUtils(unittest.TestCase):
mock_path = 'swift.container.reconciler.direct_head_container' mock_path = 'swift.container.reconciler.direct_head_container'
stub_resp_headers = [ stub_resp_headers = [
container_resp_headers( container_resp_headers(
status_changed_at=normalize_timestamp(ts.next()), status_changed_at=Timestamp(ts.next()).internal,
storage_policy_index=0, storage_policy_index=0,
), ),
container_resp_headers( container_resp_headers(
status_changed_at=normalize_timestamp(ts.next()), status_changed_at=Timestamp(ts.next()).internal,
storage_policy_index=1, storage_policy_index=1,
), ),
container_resp_headers( container_resp_headers(
status_changed_at=normalize_timestamp(ts.next()), status_changed_at=Timestamp(ts.next()).internal,
storage_policy_index=0, storage_policy_index=0,
), ),
] ]
@ -536,8 +538,8 @@ class TestReconcilerUtils(unittest.TestCase):
'partition': partition, 'method': method, 'path': path, 'partition': partition, 'method': method, 'path': path,
'headers': headers, 'query_string': query_string}) 'headers': headers, 'query_string': query_string})
x_timestamp = normalize_timestamp(time.time()) x_timestamp = Timestamp(time.time())
headers = {'x-timestamp': x_timestamp} headers = {'x-timestamp': x_timestamp.internal}
fake_hc = fake_http_connect(200, 200, 200, give_connect=test_connect) fake_hc = fake_http_connect(200, 200, 200, give_connect=test_connect)
with mock.patch(mock_path, fake_hc): with mock.patch(mock_path, fake_hc):
reconciler.direct_delete_container_entry( reconciler.direct_delete_container_entry(
@ -620,7 +622,7 @@ class TestReconcilerUtils(unittest.TestCase):
'headers': headers, 'query_string': query_string}) 'headers': headers, 'query_string': query_string})
fake_hc = fake_http_connect(200, 200, 200, give_connect=test_connect) fake_hc = fake_http_connect(200, 200, 200, give_connect=test_connect)
now = float(normalize_timestamp(time.time())) now = time.time()
with contextlib.nested( with contextlib.nested(
mock.patch(mock_path, fake_hc), mock.patch(mock_path, fake_hc),
mock.patch('swift.container.reconciler.time.time', mock.patch('swift.container.reconciler.time.time',
@ -639,7 +641,7 @@ class TestReconcilerUtils(unittest.TestCase):
for args in connect_args: for args in connect_args:
self.assertEqual(args['headers']['X-Timestamp'], self.assertEqual(args['headers']['X-Timestamp'],
normalize_timestamp(now)) Timestamp(now).internal)
self.assertEqual(args['headers']['X-Etag'], '5948918.63946') self.assertEqual(args['headers']['X-Etag'], '5948918.63946')
self.assertEqual(args['path'], self.assertEqual(args['path'],
'/.misplaced_objects/5947200/17:/a/c/o') '/.misplaced_objects/5947200/17:/a/c/o')
@ -820,7 +822,7 @@ class TestReconciler(unittest.TestCase):
# we DELETE the object from the wrong place with source_ts + offset 1 # we DELETE the object from the wrong place with source_ts + offset 1
# timestamp to make sure the change takes effect # timestamp to make sure the change takes effect
self.assertEqual(delete_headers.get('X-Timestamp'), self.assertEqual(delete_headers.get('X-Timestamp'),
normalize_timestamp(3618.84188)) Timestamp(3618.84187, offset=1).internal)
# and pop the queue for that one # and pop the queue for that one
self.assertEqual(self.reconciler.stats['pop_queue'], 1) self.assertEqual(self.reconciler.stats['pop_queue'], 1)
self.assertEqual(deleted_container_entries, [( self.assertEqual(deleted_container_entries, [(
@ -884,14 +886,14 @@ class TestReconciler(unittest.TestCase):
put_headers = self.fake_swift.storage_policy[0].headers[1] put_headers = self.fake_swift.storage_policy[0].headers[1]
# we PUT the object in the right place with q_ts + offset 2 # we PUT the object in the right place with q_ts + offset 2
self.assertEqual(put_headers.get('X-Timestamp'), self.assertEqual(put_headers.get('X-Timestamp'),
normalize_timestamp('3618.84189')) Timestamp(3618.84187, offset=2))
# cleans up the old # cleans up the old
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1) self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1)
self.assertEqual(self.reconciler.stats['cleanup_success'], 1) self.assertEqual(self.reconciler.stats['cleanup_success'], 1)
# we DELETE the object from the wrong place with source_ts + offset 1 # we DELETE the object from the wrong place with source_ts + offset 1
# timestamp to make sure the change takes effect # timestamp to make sure the change takes effect
self.assertEqual(delete_headers.get('X-Timestamp'), self.assertEqual(delete_headers.get('X-Timestamp'),
normalize_timestamp('3618.84188')) Timestamp(3618.84187, offset=1))
# and when we're done, we pop the entry from the queue # and when we're done, we pop the entry from the queue
self.assertEqual(self.reconciler.stats['pop_queue'], 1) self.assertEqual(self.reconciler.stats['pop_queue'], 1)
self.assertEqual(deleted_container_entries, self.assertEqual(deleted_container_entries,
@ -931,14 +933,14 @@ class TestReconciler(unittest.TestCase):
put_headers = self.fake_swift.storage_policy[1].headers[1] put_headers = self.fake_swift.storage_policy[1].headers[1]
# we PUT the object in the right place with q_ts + offset 2 # we PUT the object in the right place with q_ts + offset 2
self.assertEqual(put_headers.get('X-Timestamp'), self.assertEqual(put_headers.get('X-Timestamp'),
normalize_timestamp('3618.84189')) Timestamp(3618.84187, offset=2).internal)
# cleans up the old # cleans up the old
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1) self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1)
self.assertEqual(self.reconciler.stats['cleanup_success'], 1) self.assertEqual(self.reconciler.stats['cleanup_success'], 1)
# we DELETE the object from the wrong place with source_ts + offset 1 # we DELETE the object from the wrong place with source_ts + offset 1
# timestamp to make sure the change takes effect # timestamp to make sure the change takes effect
self.assertEqual(delete_headers.get('X-Timestamp'), self.assertEqual(delete_headers.get('X-Timestamp'),
normalize_timestamp('3618.84188')) Timestamp(3618.84187, offset=1).internal)
# and when we're done, we pop the entry from the queue # and when we're done, we pop the entry from the queue
self.assertEqual(self.reconciler.stats['pop_queue'], 1) self.assertEqual(self.reconciler.stats['pop_queue'], 1)
self.assertEqual(deleted_container_entries, self.assertEqual(deleted_container_entries,
@ -987,14 +989,14 @@ class TestReconciler(unittest.TestCase):
put_headers = self.fake_swift.storage_policy[0].headers[1] put_headers = self.fake_swift.storage_policy[0].headers[1]
# we PUT the object in the right place with q_ts + offset 2 # we PUT the object in the right place with q_ts + offset 2
self.assertEqual(put_headers.get('X-Timestamp'), self.assertEqual(put_headers.get('X-Timestamp'),
normalize_timestamp('3618.84189')) Timestamp(3618.84187, offset=2).internal)
# cleans up the old # cleans up the old
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1) self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1)
self.assertEqual(self.reconciler.stats['cleanup_success'], 1) self.assertEqual(self.reconciler.stats['cleanup_success'], 1)
# we DELETE the object from the wrong place with source_ts + offset 1 # we DELETE the object from the wrong place with source_ts + offset 1
# timestamp to make sure the change takes effect # timestamp to make sure the change takes effect
self.assertEqual(delete_headers.get('X-Timestamp'), self.assertEqual(delete_headers.get('X-Timestamp'),
normalize_timestamp('3618.84188')) Timestamp(3618.84187, offset=1).internal)
self.assertEqual( self.assertEqual(
delete_headers.get('X-Backend-Storage-Policy-Index'), '1') delete_headers.get('X-Backend-Storage-Policy-Index'), '1')
# and when we're done, we pop the entry from the queue # and when we're done, we pop the entry from the queue
@ -1005,18 +1007,18 @@ class TestReconciler(unittest.TestCase):
self.assertEqual(self.reconciler.stats['success'], 1) self.assertEqual(self.reconciler.stats['success'], 1)
def test_object_delete(self): def test_object_delete(self):
q_ts = float(normalize_timestamp(time.time())) q_ts = time.time()
self._mock_listing({ self._mock_listing({
(None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o1"): ( (None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o1"): (
normalize_timestamp(q_ts), 'application/x-delete'), Timestamp(q_ts).internal, 'application/x-delete'),
# object exists in "correct" storage policy - slightly older # object exists in "correct" storage policy - slightly older
(0, "/AUTH_bob/c/o1"): normalize_timestamp(q_ts - 1), (0, "/AUTH_bob/c/o1"): Timestamp(q_ts - 1).internal,
}) })
self._mock_oldest_spi({'c': 0}) self._mock_oldest_spi({'c': 0})
# the tombstone exists in the enqueued storage policy # the tombstone exists in the enqueued storage policy
self.fake_swift.storage_policy[1].register( self.fake_swift.storage_policy[1].register(
'GET', '/v1/AUTH_bob/c/o1', swob.HTTPNotFound, 'GET', '/v1/AUTH_bob/c/o1', swob.HTTPNotFound,
{'X-Backend-Timestamp': normalize_timestamp(q_ts)}) {'X-Backend-Timestamp': Timestamp(q_ts).internal})
deleted_container_entries = self._run_once() deleted_container_entries = self._run_once()
# found a misplaced object # found a misplaced object
@ -1044,14 +1046,14 @@ class TestReconciler(unittest.TestCase):
reconcile_headers = self.fake_swift.storage_policy[0].headers[1] reconcile_headers = self.fake_swift.storage_policy[0].headers[1]
# we DELETE the object in the right place with q_ts + offset 2 # we DELETE the object in the right place with q_ts + offset 2
self.assertEqual(reconcile_headers.get('X-Timestamp'), self.assertEqual(reconcile_headers.get('X-Timestamp'),
normalize_timestamp(q_ts + 0.00002)) Timestamp(q_ts, offset=2).internal)
# cleans up the old # cleans up the old
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1) self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1)
self.assertEqual(self.reconciler.stats['cleanup_success'], 1) self.assertEqual(self.reconciler.stats['cleanup_success'], 1)
# we DELETE the object from the wrong place with source_ts + offset 1 # we DELETE the object from the wrong place with source_ts + offset 1
# timestamp to make sure the change takes effect # timestamp to make sure the change takes effect
self.assertEqual(delete_headers.get('X-Timestamp'), self.assertEqual(delete_headers.get('X-Timestamp'),
normalize_timestamp(q_ts + 0.00001)) Timestamp(q_ts, offset=1))
# and when we're done, we pop the entry from the queue # and when we're done, we pop the entry from the queue
self.assertEqual(self.reconciler.stats['pop_queue'], 1) self.assertEqual(self.reconciler.stats['pop_queue'], 1)
self.assertEqual(deleted_container_entries, self.assertEqual(deleted_container_entries,
@ -1117,13 +1119,13 @@ class TestReconciler(unittest.TestCase):
# .. with source timestamp + offset 2 # .. with source timestamp + offset 2
put_headers = self.fake_swift.storage_policy[0].headers[1] put_headers = self.fake_swift.storage_policy[0].headers[1]
self.assertEqual(put_headers.get('X-Timestamp'), self.assertEqual(put_headers.get('X-Timestamp'),
normalize_timestamp(3600.234587)) Timestamp(3600.234567, offset=2))
# src object is cleaned up # src object is cleaned up
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1) self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1)
self.assertEqual(self.reconciler.stats['cleanup_success'], 1) self.assertEqual(self.reconciler.stats['cleanup_success'], 1)
# ... with q_ts + offset 1 # ... with q_ts + offset 1
self.assertEqual(delete_headers.get('X-Timestamp'), self.assertEqual(delete_headers.get('X-Timestamp'),
normalize_timestamp(3600.123466)) Timestamp(3600.123456, offset=1))
# and queue is popped # and queue is popped
self.assertEqual(self.reconciler.stats['pop_queue'], 1) self.assertEqual(self.reconciler.stats['pop_queue'], 1)
self.assertEqual(deleted_container_entries, self.assertEqual(deleted_container_entries,
@ -1132,7 +1134,7 @@ class TestReconciler(unittest.TestCase):
def test_object_move_src_object_older_than_queue_entry(self): def test_object_move_src_object_older_than_queue_entry(self):
# should be some sort of retry case # should be some sort of retry case
q_ts = float(normalize_timestamp(time.time())) q_ts = time.time()
container = str(int(q_ts // 3600 * 3600)) container = str(int(q_ts // 3600 * 3600))
q_path = '.misplaced_objects/%s' % container q_path = '.misplaced_objects/%s' % container
self._mock_listing({ self._mock_listing({
@ -1169,7 +1171,7 @@ class TestReconciler(unittest.TestCase):
def test_src_object_unavailable_with_slightly_newer_tombstone(self): def test_src_object_unavailable_with_slightly_newer_tombstone(self):
# should be some sort of retry case # should be some sort of retry case
q_ts = float(normalize_timestamp(time.time())) q_ts = float(Timestamp(time.time()))
container = str(int(q_ts // 3600 * 3600)) container = str(int(q_ts // 3600 * 3600))
q_path = '.misplaced_objects/%s' % container q_path = '.misplaced_objects/%s' % container
self._mock_listing({ self._mock_listing({
@ -1178,7 +1180,7 @@ class TestReconciler(unittest.TestCase):
self._mock_oldest_spi({'c': 0}) self._mock_oldest_spi({'c': 0})
self.fake_swift.storage_policy[1].register( self.fake_swift.storage_policy[1].register(
'GET', '/v1/AUTH_bob/c/o1', swob.HTTPNotFound, 'GET', '/v1/AUTH_bob/c/o1', swob.HTTPNotFound,
{'X-Backend-Timestamp': normalize_timestamp(q_ts + 0.00002)}) {'X-Backend-Timestamp': Timestamp(q_ts, offset=2).internal})
deleted_container_entries = self._run_once() deleted_container_entries = self._run_once()
# found a misplaced object # found a misplaced object
@ -1208,7 +1210,7 @@ class TestReconciler(unittest.TestCase):
def test_src_object_unavailable_server_error(self): def test_src_object_unavailable_server_error(self):
# should be some sort of retry case # should be some sort of retry case
q_ts = float(normalize_timestamp(time.time())) q_ts = float(Timestamp(time.time()))
container = str(int(q_ts // 3600 * 3600)) container = str(int(q_ts // 3600 * 3600))
q_path = '.misplaced_objects/%s' % container q_path = '.misplaced_objects/%s' % container
self._mock_listing({ self._mock_listing({
@ -1248,7 +1250,7 @@ class TestReconciler(unittest.TestCase):
# setup the cluster # setup the cluster
self._mock_listing({ self._mock_listing({
(None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o1"): 3600.123456, (None, "/.misplaced_objects/3600/1:/AUTH_bob/c/o1"): 3600.123456,
(1, '/AUTH_bob/c/o1'): 3600.234567, # slightly newer (1, '/AUTH_bob/c/o1'): 3600.123457, # slightly newer
}) })
self._mock_oldest_spi({'c': 0}) # destination self._mock_oldest_spi({'c': 0}) # destination
@ -1283,12 +1285,12 @@ class TestReconciler(unittest.TestCase):
# .. with source timestamp + offset 2 # .. with source timestamp + offset 2
put_headers = self.fake_swift.storage_policy[0].headers[1] put_headers = self.fake_swift.storage_policy[0].headers[1]
self.assertEqual(put_headers.get('X-Timestamp'), self.assertEqual(put_headers.get('X-Timestamp'),
normalize_timestamp(3600.234587)) Timestamp(3600.123457, offset=2))
# we try to cleanup # we try to cleanup
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1) self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1)
# ... with q_ts + offset 1 # ... with q_ts + offset 1
self.assertEqual(delete_headers.get('X-Timestamp'), self.assertEqual(delete_headers.get('X-Timestamp'),
normalize_timestamp(3600.123466)) Timestamp(3600.12346, offset=1))
# but cleanup fails! # but cleanup fails!
self.assertEqual(self.reconciler.stats['cleanup_failed'], 1) self.assertEqual(self.reconciler.stats['cleanup_failed'], 1)
# so the queue is not popped # so the queue is not popped
@ -1366,7 +1368,7 @@ class TestReconciler(unittest.TestCase):
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1) self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1)
self.assertEqual(self.reconciler.stats['cleanup_success'], 1) self.assertEqual(self.reconciler.stats['cleanup_success'], 1)
self.assertEqual(delete_headers.get('X-Timestamp'), self.assertEqual(delete_headers.get('X-Timestamp'),
normalize_timestamp(3679.20191)) Timestamp(3679.2019, offset=1))
# and wipe our hands of it # and wipe our hands of it
self.assertEqual(self.reconciler.stats['pop_queue'], 1) self.assertEqual(self.reconciler.stats['pop_queue'], 1)
self.assertEqual(deleted_container_entries, self.assertEqual(deleted_container_entries,
@ -1407,7 +1409,7 @@ class TestReconciler(unittest.TestCase):
self.assertEqual(self.reconciler.stats['cleanup_success'], 1) self.assertEqual(self.reconciler.stats['cleanup_success'], 1)
self.assertEqual(delete_headers.get('X-Timestamp'), self.assertEqual(delete_headers.get('X-Timestamp'),
normalize_timestamp('3679.20191')) Timestamp(3679.2019, offset=1))
# and since we cleaned up the old object, so this counts as done # and since we cleaned up the old object, so this counts as done
self.assertEqual(self.reconciler.stats['pop_queue'], 1) self.assertEqual(self.reconciler.stats['pop_queue'], 1)
self.assertEqual(deleted_container_entries, self.assertEqual(deleted_container_entries,
@ -1448,13 +1450,13 @@ class TestReconciler(unittest.TestCase):
# ... with a q_ts + offset 2 # ... with a q_ts + offset 2
put_headers = self.fake_swift.storage_policy[0].headers[1] put_headers = self.fake_swift.storage_policy[0].headers[1]
self.assertEqual(put_headers.get('X-Timestamp'), self.assertEqual(put_headers.get('X-Timestamp'),
normalize_timestamp(36123.38395)) Timestamp(36123.38393, offset=2))
# then clean the dark matter # then clean the dark matter
self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1) self.assertEqual(self.reconciler.stats['cleanup_attempt'], 1)
self.assertEqual(self.reconciler.stats['cleanup_success'], 1) self.assertEqual(self.reconciler.stats['cleanup_success'], 1)
# ... with a q_ts + offset 1 # ... with a q_ts + offset 1
self.assertEqual(delete_headers.get('X-Timestamp'), self.assertEqual(delete_headers.get('X-Timestamp'),
normalize_timestamp(36123.38394)) Timestamp(36123.38393, offset=1))
# and pop the queue # and pop the queue
self.assertEqual(self.reconciler.stats['pop_queue'], 1) self.assertEqual(self.reconciler.stats['pop_queue'], 1)
@ -1499,7 +1501,7 @@ class TestReconciler(unittest.TestCase):
put_headers = self.fake_swift.storage_policy[0].headers[1] put_headers = self.fake_swift.storage_policy[0].headers[1]
# ...with q_ts + offset 2 (20-microseconds) # ...with q_ts + offset 2 (20-microseconds)
self.assertEqual(put_headers.get('X-Timestamp'), self.assertEqual(put_headers.get('X-Timestamp'),
normalize_timestamp(36123.383945)) Timestamp(36123.383925, offset=2))
# but it failed # but it failed
self.assertEqual(self.reconciler.stats['copy_success'], 0) self.assertEqual(self.reconciler.stats['copy_success'], 0)
self.assertEqual(self.reconciler.stats['copy_failed'], 1) self.assertEqual(self.reconciler.stats['copy_failed'], 1)
@ -1550,7 +1552,7 @@ class TestReconciler(unittest.TestCase):
put_headers = self.fake_swift.storage_policy[0].headers[1] put_headers = self.fake_swift.storage_policy[0].headers[1]
# ...with q_ts + offset 2 (20-microseconds) # ...with q_ts + offset 2 (20-microseconds)
self.assertEqual(put_headers.get('X-Timestamp'), self.assertEqual(put_headers.get('X-Timestamp'),
normalize_timestamp(36123.383945)) Timestamp(36123.383925, offset=2))
# but it blows up hard # but it blows up hard
self.assertEqual(self.reconciler.stats['unhandled_error'], 1) self.assertEqual(self.reconciler.stats['unhandled_error'], 1)
# so we don't cleanup # so we don't cleanup
@ -1561,7 +1563,7 @@ class TestReconciler(unittest.TestCase):
self.assertEqual(self.reconciler.stats['retry'], 1) self.assertEqual(self.reconciler.stats['retry'], 1)
def test_object_move_no_such_object_no_tombstone_recent(self): def test_object_move_no_such_object_no_tombstone_recent(self):
q_ts = float(normalize_timestamp(time.time())) q_ts = float(Timestamp(time.time()))
container = str(int(q_ts // 3600 * 3600)) container = str(int(q_ts // 3600 * 3600))
q_path = '.misplaced_objects/%s' % container q_path = '.misplaced_objects/%s' % container
@ -1593,7 +1595,7 @@ class TestReconciler(unittest.TestCase):
self.assertEqual(deleted_container_entries, []) self.assertEqual(deleted_container_entries, [])
def test_object_move_no_such_object_no_tombstone_ancient(self): def test_object_move_no_such_object_no_tombstone_ancient(self):
queue_ts = float(normalize_timestamp(time.time())) - \ queue_ts = float(Timestamp(time.time())) - \
self.reconciler.reclaim_age * 1.1 self.reconciler.reclaim_age * 1.1
container = str(int(queue_ts // 3600 * 3600)) container = str(int(queue_ts // 3600 * 3600))
@ -1630,8 +1632,7 @@ class TestReconciler(unittest.TestCase):
[('.misplaced_objects', container, '1:/AUTH_jeb/c/o1')]) [('.misplaced_objects', container, '1:/AUTH_jeb/c/o1')])
def test_delete_old_empty_queue_containers(self): def test_delete_old_empty_queue_containers(self):
ts = float(normalize_timestamp(time.time())) - \ ts = time.time() - self.reconciler.reclaim_age * 1.1
self.reconciler.reclaim_age * 1.1
container = str(int(ts // 3600 * 3600)) container = str(int(ts // 3600 * 3600))
older_ts = ts - 3600 older_ts = ts - 3600
older_container = str(int(older_ts // 3600 * 3600)) older_container = str(int(older_ts // 3600 * 3600))

View File

@ -26,7 +26,7 @@ from swift.common import db_replicator
from swift.container import replicator, backend, server from swift.container import replicator, backend, server
from swift.container.reconciler import ( from swift.container.reconciler import (
MISPLACED_OBJECTS_ACCOUNT, get_reconciler_container_name) MISPLACED_OBJECTS_ACCOUNT, get_reconciler_container_name)
from swift.common.utils import normalize_timestamp from swift.common.utils import Timestamp
from swift.common.storage_policy import POLICIES from swift.common.storage_policy import POLICIES
from test.unit.common import test_db_replicator from test.unit.common import test_db_replicator
@ -44,18 +44,18 @@ class TestReplicator(unittest.TestCase):
def test_report_up_to_date(self): def test_report_up_to_date(self):
repl = replicator.ContainerReplicator({}) repl = replicator.ContainerReplicator({})
info = {'put_timestamp': normalize_timestamp(1), info = {'put_timestamp': Timestamp(1).internal,
'delete_timestamp': normalize_timestamp(0), 'delete_timestamp': Timestamp(0).internal,
'object_count': 0, 'object_count': 0,
'bytes_used': 0, 'bytes_used': 0,
'reported_put_timestamp': normalize_timestamp(1), 'reported_put_timestamp': Timestamp(1).internal,
'reported_delete_timestamp': normalize_timestamp(0), 'reported_delete_timestamp': Timestamp(0).internal,
'reported_object_count': 0, 'reported_object_count': 0,
'reported_bytes_used': 0} 'reported_bytes_used': 0}
self.assertTrue(repl.report_up_to_date(info)) self.assertTrue(repl.report_up_to_date(info))
info['delete_timestamp'] = normalize_timestamp(2) info['delete_timestamp'] = Timestamp(2).internal
self.assertFalse(repl.report_up_to_date(info)) self.assertFalse(repl.report_up_to_date(info))
info['reported_delete_timestamp'] = normalize_timestamp(2) info['reported_delete_timestamp'] = Timestamp(2).internal
self.assertTrue(repl.report_up_to_date(info)) self.assertTrue(repl.report_up_to_date(info))
info['object_count'] = 1 info['object_count'] = 1
self.assertFalse(repl.report_up_to_date(info)) self.assertFalse(repl.report_up_to_date(info))
@ -65,9 +65,9 @@ class TestReplicator(unittest.TestCase):
self.assertFalse(repl.report_up_to_date(info)) self.assertFalse(repl.report_up_to_date(info))
info['reported_bytes_used'] = 1 info['reported_bytes_used'] = 1
self.assertTrue(repl.report_up_to_date(info)) self.assertTrue(repl.report_up_to_date(info))
info['put_timestamp'] = normalize_timestamp(3) info['put_timestamp'] = Timestamp(3).internal
self.assertFalse(repl.report_up_to_date(info)) self.assertFalse(repl.report_up_to_date(info))
info['reported_put_timestamp'] = normalize_timestamp(3) info['reported_put_timestamp'] = Timestamp(3).internal
self.assertTrue(repl.report_up_to_date(info)) self.assertTrue(repl.report_up_to_date(info))
@ -328,21 +328,21 @@ class TestReplicatorSync(test_db_replicator.TestReplicatorSync):
self.assertTrue(remote_broker.is_deleted()) self.assertTrue(remote_broker.is_deleted())
info = broker.get_info() info = broker.get_info()
remote_info = remote_broker.get_info() remote_info = remote_broker.get_info()
self.assert_(float(remote_info['status_changed_at']) > self.assert_(Timestamp(remote_info['status_changed_at']) >
float(remote_info['put_timestamp']), Timestamp(remote_info['put_timestamp']),
'remote status_changed_at (%s) is not ' 'remote status_changed_at (%s) is not '
'greater than put_timestamp (%s)' % ( 'greater than put_timestamp (%s)' % (
remote_info['status_changed_at'], remote_info['status_changed_at'],
remote_info['put_timestamp'])) remote_info['put_timestamp']))
self.assert_(float(remote_info['status_changed_at']) > self.assert_(Timestamp(remote_info['status_changed_at']) >
float(info['status_changed_at']), Timestamp(info['status_changed_at']),
'remote status_changed_at (%s) is not ' 'remote status_changed_at (%s) is not '
'greater than local status_changed_at (%s)' % ( 'greater than local status_changed_at (%s)' % (
remote_info['status_changed_at'], remote_info['status_changed_at'],
info['status_changed_at'])) info['status_changed_at']))
def test_sync_bogus_db_quarantines(self): def test_sync_bogus_db_quarantines(self):
ts = (normalize_timestamp(t) for t in ts = (Timestamp(t).internal for t in
itertools.count(int(time.time()))) itertools.count(int(time.time())))
policy = random.choice(list(POLICIES)) policy = random.choice(list(POLICIES))
@ -667,22 +667,21 @@ class TestReplicatorSync(test_db_replicator.TestReplicatorSync):
remote_broker.update_status_changed_at(remote_recreate_timestamp) remote_broker.update_status_changed_at(remote_recreate_timestamp)
def test_sync_to_remote_with_misplaced(self): def test_sync_to_remote_with_misplaced(self):
ts = itertools.count(int(time.time())) ts = (Timestamp(t).internal for t in
itertools.count(int(time.time())))
# create "local" broker # create "local" broker
policy = random.choice(list(POLICIES)) policy = random.choice(list(POLICIES))
broker = self._get_broker('a', 'c', node_index=0) broker = self._get_broker('a', 'c', node_index=0)
broker.initialize(normalize_timestamp(ts.next()), broker.initialize(ts.next(), policy.idx)
policy.idx)
# create "remote" broker # create "remote" broker
remote_policy = random.choice([p for p in POLICIES if p is not remote_policy = random.choice([p for p in POLICIES if p is not
policy]) policy])
remote_broker = self._get_broker('a', 'c', node_index=1) remote_broker = self._get_broker('a', 'c', node_index=1)
remote_broker.initialize(normalize_timestamp(ts.next()), remote_broker.initialize(ts.next(), remote_policy.idx)
remote_policy.idx)
# add misplaced row to remote_broker # add misplaced row to remote_broker
remote_broker.put_object( remote_broker.put_object(
'/a/c/o', normalize_timestamp(ts.next()), 0, 'content-type', '/a/c/o', ts.next(), 0, 'content-type',
'etag', storage_policy_index=remote_broker.storage_policy_index) 'etag', storage_policy_index=remote_broker.storage_policy_index)
# since this row matches policy index or remote, it shows up in count # since this row matches policy index or remote, it shows up in count
self.assertEqual(remote_broker.get_info()['object_count'], 1) self.assertEqual(remote_broker.get_info()['object_count'], 1)
@ -716,19 +715,18 @@ class TestReplicatorSync(test_db_replicator.TestReplicatorSync):
self.assertEqual(info[key], value) self.assertEqual(info[key], value)
def test_misplaced_rows_replicate_and_enqueue(self): def test_misplaced_rows_replicate_and_enqueue(self):
ts = itertools.count(int(time.time())) ts = (Timestamp(t).internal for t in
itertools.count(int(time.time())))
policy = random.choice(list(POLICIES)) policy = random.choice(list(POLICIES))
broker = self._get_broker('a', 'c', node_index=0) broker = self._get_broker('a', 'c', node_index=0)
broker.initialize(normalize_timestamp(ts.next()), broker.initialize(ts.next(), policy.idx)
policy.idx)
remote_policy = random.choice([p for p in POLICIES if p is not remote_policy = random.choice([p for p in POLICIES if p is not
policy]) policy])
remote_broker = self._get_broker('a', 'c', node_index=1) remote_broker = self._get_broker('a', 'c', node_index=1)
remote_broker.initialize(normalize_timestamp(ts.next()), remote_broker.initialize(ts.next(), remote_policy.idx)
remote_policy.idx)
# add a misplaced row to *local* broker # add a misplaced row to *local* broker
obj_put_timestamp = normalize_timestamp(ts.next()) obj_put_timestamp = ts.next()
broker.put_object( broker.put_object(
'o', obj_put_timestamp, 0, 'content-type', 'o', obj_put_timestamp, 0, 'content-type',
'etag', storage_policy_index=remote_policy.idx) 'etag', storage_policy_index=remote_policy.idx)
@ -777,22 +775,21 @@ class TestReplicatorSync(test_db_replicator.TestReplicatorSync):
self.assertEqual(broker.get_reconciler_sync(), 1) self.assertEqual(broker.get_reconciler_sync(), 1)
def test_multiple_out_sync_reconciler_enqueue_normalize(self): def test_multiple_out_sync_reconciler_enqueue_normalize(self):
ts = itertools.count(int(time.time())) ts = (Timestamp(t).internal for t in
itertools.count(int(time.time())))
policy = random.choice(list(POLICIES)) policy = random.choice(list(POLICIES))
broker = self._get_broker('a', 'c', node_index=0) broker = self._get_broker('a', 'c', node_index=0)
broker.initialize(normalize_timestamp(ts.next()), policy.idx) broker.initialize(ts.next(), policy.idx)
remote_policy = random.choice([p for p in POLICIES if p is not remote_policy = random.choice([p for p in POLICIES if p is not
policy]) policy])
remote_broker = self._get_broker('a', 'c', node_index=1) remote_broker = self._get_broker('a', 'c', node_index=1)
remote_broker.initialize(normalize_timestamp(ts.next()), remote_broker.initialize(ts.next(), remote_policy.idx)
remote_policy.idx)
# add some rows to brokers # add some rows to brokers
for db in (broker, remote_broker): for db in (broker, remote_broker):
for p in (policy, remote_policy): for p in (policy, remote_policy):
db.put_object('o-%s' % p.name, normalize_timestamp(ts.next()), db.put_object('o-%s' % p.name, ts.next(), 0, 'content-type',
0, 'content-type', 'etag', 'etag', storage_policy_index=p.idx)
storage_policy_index=p.idx)
db._commit_puts() db._commit_puts()
expected_policy_stats = { expected_policy_stats = {

View File

@ -35,8 +35,8 @@ from swift.common.swob import Request, HeaderKeyDict
import swift.container import swift.container
from swift.container import server as container_server from swift.container import server as container_server
from swift.common import constraints from swift.common import constraints
from swift.common.utils import (normalize_timestamp, mkdirs, public, from swift.common.utils import (Timestamp, mkdirs, public, replication,
replication, lock_parent_directory) lock_parent_directory, json)
from test.unit import fake_http_connect from test.unit import fake_http_connect
from swift.common.storage_policy import (POLICY_INDEX, POLICIES, from swift.common.storage_policy import (POLICY_INDEX, POLICIES,
StoragePolicy) StoragePolicy)
@ -171,9 +171,9 @@ class TestContainerController(unittest.TestCase):
def test_HEAD(self): def test_HEAD(self):
start = int(time.time()) start = int(time.time())
ts = itertools.count(start) ts = (Timestamp(t).internal for t in itertools.count(start))
req = Request.blank('/sda1/p/a/c', method='PUT', headers={ req = Request.blank('/sda1/p/a/c', method='PUT', headers={
'x-timestamp': normalize_timestamp(ts.next())}) 'x-timestamp': ts.next()})
req.get_response(self.controller) req.get_response(self.controller)
req = Request.blank('/sda1/p/a/c', method='HEAD') req = Request.blank('/sda1/p/a/c', method='HEAD')
response = req.get_response(self.controller) response = req.get_response(self.controller)
@ -182,7 +182,7 @@ class TestContainerController(unittest.TestCase):
self.assertEqual(response.headers['x-container-object-count'], '0') self.assertEqual(response.headers['x-container-object-count'], '0')
obj_put_request = Request.blank( obj_put_request = Request.blank(
'/sda1/p/a/c/o', method='PUT', headers={ '/sda1/p/a/c/o', method='PUT', headers={
'x-timestamp': normalize_timestamp(ts.next()), 'x-timestamp': ts.next(),
'x-size': 42, 'x-size': 42,
'x-content-type': 'text/plain', 'x-content-type': 'text/plain',
'x-etag': 'x', 'x-etag': 'x',
@ -196,20 +196,24 @@ class TestContainerController(unittest.TestCase):
self.assertEqual(response.headers['x-container-bytes-used'], '42') self.assertEqual(response.headers['x-container-bytes-used'], '42')
self.assertEqual(response.headers['x-container-object-count'], '1') self.assertEqual(response.headers['x-container-object-count'], '1')
# created at time... # created at time...
self.assert_(float(response.headers['x-timestamp']) >= start) created_at_header = Timestamp(response.headers['x-timestamp'])
self.assertEqual(response.headers['x-timestamp'],
created_at_header.normal)
self.assert_(created_at_header >= start)
self.assertEqual(response.headers['x-put-timestamp'], self.assertEqual(response.headers['x-put-timestamp'],
normalize_timestamp(start)) Timestamp(start).normal)
# backend headers # backend headers
self.assertEqual(int(response.headers[POLICY_INDEX]), self.assertEqual(int(response.headers[POLICY_INDEX]),
int(POLICIES.default)) int(POLICIES.default))
self.assert_(float(response.headers['x-backend-timestamp']) >= start) self.assert_(
Timestamp(response.headers['x-backend-timestamp']) >= start)
self.assertEqual(response.headers['x-backend-put-timestamp'], self.assertEqual(response.headers['x-backend-put-timestamp'],
normalize_timestamp(start)) Timestamp(start).internal)
self.assertEqual(response.headers['x-backend-delete-timestamp'], self.assertEqual(response.headers['x-backend-delete-timestamp'],
normalize_timestamp(0)) Timestamp(0).internal)
self.assertEqual(response.headers['x-backend-status-changed-at'], self.assertEqual(response.headers['x-backend-status-changed-at'],
normalize_timestamp(start)) Timestamp(start).internal)
def test_HEAD_not_found(self): def test_HEAD_not_found(self):
req = Request.blank('/sda1/p/a/c', method='HEAD') req = Request.blank('/sda1/p/a/c', method='HEAD')
@ -217,22 +221,23 @@ class TestContainerController(unittest.TestCase):
self.assertEqual(resp.status_int, 404) self.assertEqual(resp.status_int, 404)
self.assertEqual(int(resp.headers[POLICY_INDEX]), 0) self.assertEqual(int(resp.headers[POLICY_INDEX]), 0)
self.assertEqual(resp.headers['x-backend-timestamp'], self.assertEqual(resp.headers['x-backend-timestamp'],
normalize_timestamp(0)) Timestamp(0).internal)
self.assertEqual(resp.headers['x-backend-put-timestamp'], self.assertEqual(resp.headers['x-backend-put-timestamp'],
normalize_timestamp(0)) Timestamp(0).internal)
self.assertEqual(resp.headers['x-backend-status-changed-at'], self.assertEqual(resp.headers['x-backend-status-changed-at'],
normalize_timestamp(0)) Timestamp(0).internal)
self.assertEqual(resp.headers['x-backend-delete-timestamp'], self.assertEqual(resp.headers['x-backend-delete-timestamp'],
normalize_timestamp(0)) Timestamp(0).internal)
for header in ('x-container-object-count', 'x-container-bytes-used', for header in ('x-container-object-count', 'x-container-bytes-used',
'x-timestamp', 'x-put-timestamp'): 'x-timestamp', 'x-put-timestamp'):
self.assertEqual(resp.headers[header], None) self.assertEqual(resp.headers[header], None)
def test_deleted_headers(self): def test_deleted_headers(self):
ts = itertools.count(int(time.time())) ts = (Timestamp(t).internal for t in
itertools.count(int(time.time())))
request_method_times = { request_method_times = {
'PUT': normalize_timestamp(ts.next()), 'PUT': ts.next(),
'DELETE': normalize_timestamp(ts.next()), 'DELETE': ts.next(),
} }
# setup a deleted container # setup a deleted container
for method in ('PUT', 'DELETE'): for method in ('PUT', 'DELETE'):
@ -249,8 +254,8 @@ class TestContainerController(unittest.TestCase):
# backend headers # backend headers
self.assertEqual(int(resp.headers[POLICY_INDEX]), self.assertEqual(int(resp.headers[POLICY_INDEX]),
int(POLICIES.default)) int(POLICIES.default))
self.assert_(float(resp.headers['x-backend-timestamp']) >= self.assert_(Timestamp(resp.headers['x-backend-timestamp']) >=
float(request_method_times['PUT'])) Timestamp(request_method_times['PUT']))
self.assertEqual(resp.headers['x-backend-put-timestamp'], self.assertEqual(resp.headers['x-backend-put-timestamp'],
request_method_times['PUT']) request_method_times['PUT'])
self.assertEqual(resp.headers['x-backend-delete-timestamp'], self.assertEqual(resp.headers['x-backend-delete-timestamp'],
@ -357,7 +362,7 @@ class TestContainerController(unittest.TestCase):
policy = random.choice(list(POLICIES)) policy = random.choice(list(POLICIES))
# Set metadata header # Set metadata header
req = Request.blank('/sda1/p/a/c', method='PUT', req = Request.blank('/sda1/p/a/c', method='PUT',
headers={'X-Timestamp': normalize_timestamp(1), headers={'X-Timestamp': Timestamp(1).internal,
POLICY_INDEX: policy.idx}) POLICY_INDEX: policy.idx})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201) self.assertEquals(resp.status_int, 201)
@ -370,7 +375,7 @@ class TestContainerController(unittest.TestCase):
def test_PUT_no_policy_specified(self): def test_PUT_no_policy_specified(self):
# Set metadata header # Set metadata header
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'}, req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(1)}) headers={'X-Timestamp': Timestamp(1).internal})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201) self.assertEquals(resp.status_int, 201)
@ -383,18 +388,18 @@ class TestContainerController(unittest.TestCase):
def test_PUT_bad_policy_specified(self): def test_PUT_bad_policy_specified(self):
# Set metadata header # Set metadata header
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'}, req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(1), headers={'X-Timestamp': Timestamp(1).internal,
POLICY_INDEX: 'nada'}) POLICY_INDEX: 'nada'})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
# make sure we get bad response # make sure we get bad response
self.assertEquals(resp.status_int, 400) self.assertEquals(resp.status_int, 400)
def test_PUT_no_policy_change(self): def test_PUT_no_policy_change(self):
ts = itertools.count(1) ts = (Timestamp(t).internal for t in itertools.count(time.time()))
policy = random.choice(list(POLICIES)) policy = random.choice(list(POLICIES))
# Set metadata header # Set metadata header
req = Request.blank('/sda1/p/a/c', method='PUT', headers={ req = Request.blank('/sda1/p/a/c', method='PUT', headers={
'X-Timestamp': normalize_timestamp(ts.next()), 'X-Timestamp': ts.next(),
POLICY_INDEX: policy.idx}) POLICY_INDEX: policy.idx})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201) self.assertEquals(resp.status_int, 201)
@ -407,7 +412,7 @@ class TestContainerController(unittest.TestCase):
# now try to update w/o changing the policy # now try to update w/o changing the policy
for method in ('POST', 'PUT'): for method in ('POST', 'PUT'):
req = Request.blank('/sda1/p/a/c', method=method, headers={ req = Request.blank('/sda1/p/a/c', method=method, headers={
'X-Timestamp': normalize_timestamp(ts.next()), 'X-Timestamp': ts.next(),
POLICY_INDEX: policy.idx POLICY_INDEX: policy.idx
}) })
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
@ -419,11 +424,11 @@ class TestContainerController(unittest.TestCase):
self.assertEquals(resp.headers.get(POLICY_INDEX), str(policy.idx)) self.assertEquals(resp.headers.get(POLICY_INDEX), str(policy.idx))
def test_PUT_bad_policy_change(self): def test_PUT_bad_policy_change(self):
ts = itertools.count(1) ts = (Timestamp(t).internal for t in itertools.count(time.time()))
policy = random.choice(list(POLICIES)) policy = random.choice(list(POLICIES))
# Set metadata header # Set metadata header
req = Request.blank('/sda1/p/a/c', method='PUT', headers={ req = Request.blank('/sda1/p/a/c', method='PUT', headers={
'X-Timestamp': normalize_timestamp(ts.next()), 'X-Timestamp': ts.next(),
POLICY_INDEX: policy.idx}) POLICY_INDEX: policy.idx})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201) self.assertEquals(resp.status_int, 201)
@ -437,7 +442,7 @@ class TestContainerController(unittest.TestCase):
for other_policy in other_policies: for other_policy in other_policies:
# now try to change it and make sure we get a conflict # now try to change it and make sure we get a conflict
req = Request.blank('/sda1/p/a/c', method='PUT', headers={ req = Request.blank('/sda1/p/a/c', method='PUT', headers={
'X-Timestamp': normalize_timestamp(ts.next()), 'X-Timestamp': ts.next(),
POLICY_INDEX: other_policy.idx POLICY_INDEX: other_policy.idx
}) })
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
@ -451,10 +456,10 @@ class TestContainerController(unittest.TestCase):
self.assertEquals(resp.headers.get(POLICY_INDEX), str(policy.idx)) self.assertEquals(resp.headers.get(POLICY_INDEX), str(policy.idx))
def test_POST_ignores_policy_change(self): def test_POST_ignores_policy_change(self):
ts = itertools.count(1) ts = (Timestamp(t).internal for t in itertools.count(time.time()))
policy = random.choice(list(POLICIES)) policy = random.choice(list(POLICIES))
req = Request.blank('/sda1/p/a/c', method='PUT', headers={ req = Request.blank('/sda1/p/a/c', method='PUT', headers={
'X-Timestamp': normalize_timestamp(ts.next()), 'X-Timestamp': ts.next(),
POLICY_INDEX: policy.idx}) POLICY_INDEX: policy.idx})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201) self.assertEquals(resp.status_int, 201)
@ -468,7 +473,7 @@ class TestContainerController(unittest.TestCase):
for other_policy in other_policies: for other_policy in other_policies:
# now try to change it and make sure we get a conflict # now try to change it and make sure we get a conflict
req = Request.blank('/sda1/p/a/c', method='POST', headers={ req = Request.blank('/sda1/p/a/c', method='POST', headers={
'X-Timestamp': normalize_timestamp(ts.next()), 'X-Timestamp': ts.next(),
POLICY_INDEX: other_policy.idx POLICY_INDEX: other_policy.idx
}) })
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
@ -483,10 +488,11 @@ class TestContainerController(unittest.TestCase):
self.assertEquals(resp.headers.get(POLICY_INDEX), str(policy.idx)) self.assertEquals(resp.headers.get(POLICY_INDEX), str(policy.idx))
def test_PUT_no_policy_for_existing_default(self): def test_PUT_no_policy_for_existing_default(self):
ts = itertools.count(1) ts = (Timestamp(t).internal for t in
itertools.count(int(time.time())))
# create a container with the default storage policy # create a container with the default storage policy
req = Request.blank('/sda1/p/a/c', method='PUT', headers={ req = Request.blank('/sda1/p/a/c', method='PUT', headers={
'X-Timestamp': normalize_timestamp(ts.next()), 'X-Timestamp': ts.next(),
}) })
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 201) # sanity check self.assertEqual(resp.status_int, 201) # sanity check
@ -500,7 +506,7 @@ class TestContainerController(unittest.TestCase):
# put again without specifying the storage policy # put again without specifying the storage policy
req = Request.blank('/sda1/p/a/c', method='PUT', headers={ req = Request.blank('/sda1/p/a/c', method='PUT', headers={
'X-Timestamp': normalize_timestamp(ts.next()), 'X-Timestamp': ts.next(),
}) })
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 202) # sanity check self.assertEqual(resp.status_int, 202) # sanity check
@ -517,10 +523,11 @@ class TestContainerController(unittest.TestCase):
# during a config change restart across a multi node cluster. # during a config change restart across a multi node cluster.
proxy_default = random.choice([p for p in POLICIES if not proxy_default = random.choice([p for p in POLICIES if not
p.is_default]) p.is_default])
ts = itertools.count(1) ts = (Timestamp(t).internal for t in
itertools.count(int(time.time())))
# create a container with the default storage policy # create a container with the default storage policy
req = Request.blank('/sda1/p/a/c', method='PUT', headers={ req = Request.blank('/sda1/p/a/c', method='PUT', headers={
'X-Timestamp': normalize_timestamp(ts.next()), 'X-Timestamp': ts.next(),
'X-Backend-Storage-Policy-Default': int(proxy_default), 'X-Backend-Storage-Policy-Default': int(proxy_default),
}) })
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
@ -535,7 +542,7 @@ class TestContainerController(unittest.TestCase):
# put again without proxy specifying the different default # put again without proxy specifying the different default
req = Request.blank('/sda1/p/a/c', method='PUT', headers={ req = Request.blank('/sda1/p/a/c', method='PUT', headers={
'X-Timestamp': normalize_timestamp(ts.next()), 'X-Timestamp': ts.next(),
'X-Backend-Storage-Policy-Default': int(POLICIES.default), 'X-Backend-Storage-Policy-Default': int(POLICIES.default),
}) })
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
@ -549,11 +556,11 @@ class TestContainerController(unittest.TestCase):
int(proxy_default)) int(proxy_default))
def test_PUT_no_policy_for_existing_non_default(self): def test_PUT_no_policy_for_existing_non_default(self):
ts = itertools.count(1) ts = (Timestamp(t).internal for t in itertools.count(time.time()))
non_default_policy = [p for p in POLICIES if not p.is_default][0] non_default_policy = [p for p in POLICIES if not p.is_default][0]
# create a container with the non-default storage policy # create a container with the non-default storage policy
req = Request.blank('/sda1/p/a/c', method='PUT', headers={ req = Request.blank('/sda1/p/a/c', method='PUT', headers={
'X-Timestamp': normalize_timestamp(ts.next()), 'X-Timestamp': ts.next(),
POLICY_INDEX: non_default_policy.idx, POLICY_INDEX: non_default_policy.idx,
}) })
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
@ -568,7 +575,7 @@ class TestContainerController(unittest.TestCase):
# put again without specifiying the storage policy # put again without specifiying the storage policy
req = Request.blank('/sda1/p/a/c', method='PUT', headers={ req = Request.blank('/sda1/p/a/c', method='PUT', headers={
'X-Timestamp': normalize_timestamp(ts.next()), 'X-Timestamp': ts.next(),
}) })
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 202) # sanity check self.assertEqual(resp.status_int, 202) # sanity check
@ -584,7 +591,7 @@ class TestContainerController(unittest.TestCase):
# Set metadata header # Set metadata header
req = Request.blank( req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'}, '/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(1), headers={'X-Timestamp': Timestamp(1).internal,
'X-Container-Meta-Test': 'Value'}) 'X-Container-Meta-Test': 'Value'})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201) self.assertEquals(resp.status_int, 201)
@ -595,7 +602,7 @@ class TestContainerController(unittest.TestCase):
# Set another metadata header, ensuring old one doesn't disappear # Set another metadata header, ensuring old one doesn't disappear
req = Request.blank( req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'}, '/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(1), headers={'X-Timestamp': Timestamp(1).internal,
'X-Container-Meta-Test2': 'Value2'}) 'X-Container-Meta-Test2': 'Value2'})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204) self.assertEquals(resp.status_int, 204)
@ -607,7 +614,7 @@ class TestContainerController(unittest.TestCase):
# Update metadata header # Update metadata header
req = Request.blank( req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'}, '/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(3), headers={'X-Timestamp': Timestamp(3).internal,
'X-Container-Meta-Test': 'New Value'}) 'X-Container-Meta-Test': 'New Value'})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 202) self.assertEquals(resp.status_int, 202)
@ -619,7 +626,7 @@ class TestContainerController(unittest.TestCase):
# Send old update to metadata header # Send old update to metadata header
req = Request.blank( req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'}, '/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(2), headers={'X-Timestamp': Timestamp(2).internal,
'X-Container-Meta-Test': 'Old Value'}) 'X-Container-Meta-Test': 'Old Value'})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 202) self.assertEquals(resp.status_int, 202)
@ -631,7 +638,7 @@ class TestContainerController(unittest.TestCase):
# Remove metadata header (by setting it to empty) # Remove metadata header (by setting it to empty)
req = Request.blank( req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'}, '/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(4), headers={'X-Timestamp': Timestamp(4).internal,
'X-Container-Meta-Test': ''}) 'X-Container-Meta-Test': ''})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 202) self.assertEquals(resp.status_int, 202)
@ -646,7 +653,7 @@ class TestContainerController(unittest.TestCase):
key2 = '%sTest2' % prefix key2 = '%sTest2' % prefix
# Set metadata header # Set metadata header
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'}, req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(1), headers={'X-Timestamp': Timestamp(1).internal,
key: 'Value'}) key: 'Value'})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201) self.assertEquals(resp.status_int, 201)
@ -656,7 +663,7 @@ class TestContainerController(unittest.TestCase):
self.assertEquals(resp.headers.get(key.lower()), 'Value') self.assertEquals(resp.headers.get(key.lower()), 'Value')
# Set another metadata header, ensuring old one doesn't disappear # Set another metadata header, ensuring old one doesn't disappear
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'}, req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(1), headers={'X-Timestamp': Timestamp(1).internal,
key2: 'Value2'}) key2: 'Value2'})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204) self.assertEquals(resp.status_int, 204)
@ -667,7 +674,7 @@ class TestContainerController(unittest.TestCase):
self.assertEquals(resp.headers.get(key2.lower()), 'Value2') self.assertEquals(resp.headers.get(key2.lower()), 'Value2')
# Update metadata header # Update metadata header
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'}, req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(3), headers={'X-Timestamp': Timestamp(3).internal,
key: 'New Value'}) key: 'New Value'})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 202) self.assertEquals(resp.status_int, 202)
@ -678,7 +685,7 @@ class TestContainerController(unittest.TestCase):
'New Value') 'New Value')
# Send old update to metadata header # Send old update to metadata header
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'}, req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(2), headers={'X-Timestamp': Timestamp(2).internal,
key: 'Old Value'}) key: 'Old Value'})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 202) self.assertEquals(resp.status_int, 202)
@ -689,7 +696,7 @@ class TestContainerController(unittest.TestCase):
'New Value') 'New Value')
# Remove metadata header (by setting it to empty) # Remove metadata header (by setting it to empty)
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'}, req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(4), headers={'X-Timestamp': Timestamp(4).internal,
key: ''}) key: ''})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 202) self.assertEquals(resp.status_int, 202)
@ -725,13 +732,13 @@ class TestContainerController(unittest.TestCase):
def test_POST_HEAD_metadata(self): def test_POST_HEAD_metadata(self):
req = Request.blank( req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'}, '/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(1)}) headers={'X-Timestamp': Timestamp(1).internal})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201) self.assertEquals(resp.status_int, 201)
# Set metadata header # Set metadata header
req = Request.blank( req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'}, '/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(1), headers={'X-Timestamp': Timestamp(1).internal,
'X-Container-Meta-Test': 'Value'}) 'X-Container-Meta-Test': 'Value'})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204) self.assertEquals(resp.status_int, 204)
@ -742,7 +749,7 @@ class TestContainerController(unittest.TestCase):
# Update metadata header # Update metadata header
req = Request.blank( req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'}, '/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(3), headers={'X-Timestamp': Timestamp(3).internal,
'X-Container-Meta-Test': 'New Value'}) 'X-Container-Meta-Test': 'New Value'})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204) self.assertEquals(resp.status_int, 204)
@ -754,7 +761,7 @@ class TestContainerController(unittest.TestCase):
# Send old update to metadata header # Send old update to metadata header
req = Request.blank( req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'}, '/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(2), headers={'X-Timestamp': Timestamp(2).internal,
'X-Container-Meta-Test': 'Old Value'}) 'X-Container-Meta-Test': 'Old Value'})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204) self.assertEquals(resp.status_int, 204)
@ -766,7 +773,7 @@ class TestContainerController(unittest.TestCase):
# Remove metadata header (by setting it to empty) # Remove metadata header (by setting it to empty)
req = Request.blank( req = Request.blank(
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'}, '/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(4), headers={'X-Timestamp': Timestamp(4).internal,
'X-Container-Meta-Test': ''}) 'X-Container-Meta-Test': ''})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204) self.assertEquals(resp.status_int, 204)
@ -779,12 +786,12 @@ class TestContainerController(unittest.TestCase):
prefix = get_sys_meta_prefix('container') prefix = get_sys_meta_prefix('container')
key = '%sTest' % prefix key = '%sTest' % prefix
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'}, req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': normalize_timestamp(1)}) headers={'X-Timestamp': Timestamp(1).internal})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201) self.assertEquals(resp.status_int, 201)
# Set metadata header # Set metadata header
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'}, req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(1), headers={'X-Timestamp': Timestamp(1).internal,
key: 'Value'}) key: 'Value'})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204) self.assertEquals(resp.status_int, 204)
@ -794,7 +801,7 @@ class TestContainerController(unittest.TestCase):
self.assertEquals(resp.headers.get(key.lower()), 'Value') self.assertEquals(resp.headers.get(key.lower()), 'Value')
# Update metadata header # Update metadata header
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'}, req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(3), headers={'X-Timestamp': Timestamp(3).internal,
key: 'New Value'}) key: 'New Value'})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204) self.assertEquals(resp.status_int, 204)
@ -805,7 +812,7 @@ class TestContainerController(unittest.TestCase):
'New Value') 'New Value')
# Send old update to metadata header # Send old update to metadata header
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'}, req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(2), headers={'X-Timestamp': Timestamp(2).internal,
key: 'Old Value'}) key: 'Old Value'})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204) self.assertEquals(resp.status_int, 204)
@ -816,7 +823,7 @@ class TestContainerController(unittest.TestCase):
'New Value') 'New Value')
# Remove metadata header (by setting it to empty) # Remove metadata header (by setting it to empty)
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'}, req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Timestamp': normalize_timestamp(4), headers={'X-Timestamp': Timestamp(4).internal,
key: ''}) key: ''})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204) self.assertEquals(resp.status_int, 204)
@ -962,11 +969,11 @@ class TestContainerController(unittest.TestCase):
req = Request.blank( req = Request.blank(
'/sda1/p/a/c', '/sda1/p/a/c',
environ={'REQUEST_METHOD': 'PUT'}, environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '0000000001.00000', headers={'X-Timestamp': Timestamp(1).internal,
'X-Account-Host': '%s:%s' % bindsock.getsockname(), 'X-Account-Host': '%s:%s' % bindsock.getsockname(),
'X-Account-Partition': '123', 'X-Account-Partition': '123',
'X-Account-Device': 'sda1'}) 'X-Account-Device': 'sda1'})
event = spawn(accept, 201, '0000000001.00000') event = spawn(accept, 201, Timestamp(1).internal)
try: try:
with Timeout(3): with Timeout(3):
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
@ -984,11 +991,11 @@ class TestContainerController(unittest.TestCase):
req = Request.blank( req = Request.blank(
'/sda1/p/a/c', '/sda1/p/a/c',
environ={'REQUEST_METHOD': 'PUT'}, environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '0000000003.00000', headers={'X-Timestamp': Timestamp(3).internal,
'X-Account-Host': '%s:%s' % bindsock.getsockname(), 'X-Account-Host': '%s:%s' % bindsock.getsockname(),
'X-Account-Partition': '123', 'X-Account-Partition': '123',
'X-Account-Device': 'sda1'}) 'X-Account-Device': 'sda1'})
event = spawn(accept, 404, '0000000003.00000') event = spawn(accept, 404, Timestamp(3).internal)
try: try:
with Timeout(3): with Timeout(3):
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
@ -1000,11 +1007,11 @@ class TestContainerController(unittest.TestCase):
req = Request.blank( req = Request.blank(
'/sda1/p/a/c', '/sda1/p/a/c',
environ={'REQUEST_METHOD': 'PUT'}, environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Timestamp': '0000000005.00000', headers={'X-Timestamp': Timestamp(5).internal,
'X-Account-Host': '%s:%s' % bindsock.getsockname(), 'X-Account-Host': '%s:%s' % bindsock.getsockname(),
'X-Account-Partition': '123', 'X-Account-Partition': '123',
'X-Account-Device': 'sda1'}) 'X-Account-Device': 'sda1'})
event = spawn(accept, 503, '0000000005.00000') event = spawn(accept, 503, Timestamp(5).internal)
got_exc = False got_exc = False
try: try:
with Timeout(3): with Timeout(3):
@ -1125,9 +1132,9 @@ class TestContainerController(unittest.TestCase):
self.assertEquals(resp.status_int, 404) # sanity self.assertEquals(resp.status_int, 404) # sanity
# backend headers # backend headers
expectations = { expectations = {
'x-backend-put-timestamp': normalize_timestamp(1), 'x-backend-put-timestamp': Timestamp(1).internal,
'x-backend-delete-timestamp': normalize_timestamp(2), 'x-backend-delete-timestamp': Timestamp(2).internal,
'x-backend-status-changed-at': normalize_timestamp(2), 'x-backend-status-changed-at': Timestamp(2).internal,
} }
for header, value in expectations.items(): for header, value in expectations.items():
self.assertEqual(resp.headers[header], value, self.assertEqual(resp.headers[header], value,
@ -1136,9 +1143,9 @@ class TestContainerController(unittest.TestCase):
db = self.controller._get_container_broker('sda1', 'p', 'a', 'c') db = self.controller._get_container_broker('sda1', 'p', 'a', 'c')
self.assertEqual(True, db.is_deleted()) self.assertEqual(True, db.is_deleted())
info = db.get_info() info = db.get_info()
self.assertEquals(info['put_timestamp'], normalize_timestamp('1')) self.assertEquals(info['put_timestamp'], Timestamp('1').internal)
self.assertEquals(info['delete_timestamp'], normalize_timestamp('2')) self.assertEquals(info['delete_timestamp'], Timestamp('2').internal)
self.assertEquals(info['status_changed_at'], normalize_timestamp('2')) self.assertEquals(info['status_changed_at'], Timestamp('2').internal)
# recreate # recreate
req = Request.blank(path, method='PUT', req = Request.blank(path, method='PUT',
headers={'X-Timestamp': '4'}) headers={'X-Timestamp': '4'})
@ -1147,17 +1154,17 @@ class TestContainerController(unittest.TestCase):
db = self.controller._get_container_broker('sda1', 'p', 'a', 'c') db = self.controller._get_container_broker('sda1', 'p', 'a', 'c')
self.assertEqual(False, db.is_deleted()) self.assertEqual(False, db.is_deleted())
info = db.get_info() info = db.get_info()
self.assertEquals(info['put_timestamp'], normalize_timestamp('4')) self.assertEquals(info['put_timestamp'], Timestamp('4').internal)
self.assertEquals(info['delete_timestamp'], normalize_timestamp('2')) self.assertEquals(info['delete_timestamp'], Timestamp('2').internal)
self.assertEquals(info['status_changed_at'], normalize_timestamp('4')) self.assertEquals(info['status_changed_at'], Timestamp('4').internal)
for method in ('GET', 'HEAD'): for method in ('GET', 'HEAD'):
req = Request.blank(path) req = Request.blank(path)
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
expectations = { expectations = {
'x-put-timestamp': normalize_timestamp(4), 'x-put-timestamp': Timestamp(4).normal,
'x-backend-put-timestamp': normalize_timestamp(4), 'x-backend-put-timestamp': Timestamp(4).internal,
'x-backend-delete-timestamp': normalize_timestamp(2), 'x-backend-delete-timestamp': Timestamp(2).internal,
'x-backend-status-changed-at': normalize_timestamp(4), 'x-backend-status-changed-at': Timestamp(4).internal,
} }
for header, expected in expectations.items(): for header, expected in expectations.items():
self.assertEqual(resp.headers[header], expected, self.assertEqual(resp.headers[header], expected,
@ -1217,8 +1224,8 @@ class TestContainerController(unittest.TestCase):
[(exists, db.db_file) for exists in (False, True)]) [(exists, db.db_file) for exists in (False, True)])
# info was updated # info was updated
info = db.get_info() info = db.get_info()
self.assertEquals(info['put_timestamp'], normalize_timestamp('4')) self.assertEquals(info['put_timestamp'], Timestamp('4').internal)
self.assertEquals(info['delete_timestamp'], normalize_timestamp('2')) self.assertEquals(info['delete_timestamp'], Timestamp('2').internal)
def test_DELETE_not_found(self): def test_DELETE_not_found(self):
# Even if the container wasn't previously heard of, the container # Even if the container wasn't previously heard of, the container
@ -1231,7 +1238,7 @@ class TestContainerController(unittest.TestCase):
self.assertEquals(resp.status_int, 404) self.assertEquals(resp.status_int, 404)
def test_change_storage_policy_via_DELETE_then_PUT(self): def test_change_storage_policy_via_DELETE_then_PUT(self):
ts = (normalize_timestamp(t) for t in ts = (Timestamp(t).internal for t in
itertools.count(int(time.time()))) itertools.count(int(time.time())))
policy = random.choice(list(POLICIES)) policy = random.choice(list(POLICIES))
req = Request.blank( req = Request.blank(
@ -1265,7 +1272,7 @@ class TestContainerController(unittest.TestCase):
str(other_policy.idx)) str(other_policy.idx))
def test_change_to_default_storage_policy_via_DELETE_then_PUT(self): def test_change_to_default_storage_policy_via_DELETE_then_PUT(self):
ts = (normalize_timestamp(t) for t in ts = (Timestamp(t).internal for t in
itertools.count(int(time.time()))) itertools.count(int(time.time())))
non_default_policy = random.choice([p for p in POLICIES non_default_policy = random.choice([p for p in POLICIES
if not p.is_default]) if not p.is_default])
@ -1297,40 +1304,155 @@ class TestContainerController(unittest.TestCase):
def test_DELETE_object(self): def test_DELETE_object(self):
req = Request.blank( req = Request.blank(
'/sda1/p/a/c', '/sda1/p/a/c', method='PUT', headers={
environ={'REQUEST_METHOD': 'PUT'}, headers={'X-Timestamp': '2'}) 'X-Timestamp': Timestamp(2).internal})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201) self.assertEquals(resp.status_int, 201)
req = Request.blank( req = Request.blank(
'/sda1/p/a/c/o', '/sda1/p/a/c/o', method='PUT', headers={
environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '0', 'X-Timestamp': Timestamp(0).internal, 'X-Size': 1,
'HTTP_X_SIZE': 1, 'HTTP_X_CONTENT_TYPE': 'text/plain', 'X-Content-Type': 'text/plain', 'X-Etag': 'x'})
'HTTP_X_ETAG': 'x'})
self._update_object_put_headers(req) self._update_object_put_headers(req)
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201) self.assertEquals(resp.status_int, 201)
req = Request.blank( ts = (Timestamp(t).internal for t in
'/sda1/p/a/c', itertools.count(3))
environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': '3'}) req = Request.blank('/sda1/p/a/c', method='DELETE', headers={
'X-Timestamp': ts.next()})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 409) self.assertEquals(resp.status_int, 409)
req = Request.blank( req = Request.blank('/sda1/p/a/c/o', method='DELETE', headers={
'/sda1/p/a/c/o', 'X-Timestamp': ts.next()})
environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': '4'})
self._update_object_put_headers(req) self._update_object_put_headers(req)
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204) self.assertEquals(resp.status_int, 204)
req = Request.blank( req = Request.blank('/sda1/p/a/c', method='DELETE', headers={
'/sda1/p/a/c', 'X-Timestamp': ts.next()})
environ={'REQUEST_METHOD': 'DELETE'}, headers={'X-Timestamp': '5'})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204) self.assertEquals(resp.status_int, 204)
req = Request.blank( req = Request.blank('/sda1/p/a/c', method='GET', headers={
'/sda1/p/a/c', 'X-Timestamp': ts.next()})
environ={'REQUEST_METHOD': 'GET'}, headers={'X-Timestamp': '6'})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 404) self.assertEquals(resp.status_int, 404)
def test_object_update_with_offset(self):
ts = (Timestamp(t).internal for t in
itertools.count(int(time.time())))
# create container
req = Request.blank('/sda1/p/a/c', method='PUT', headers={
'X-Timestamp': ts.next()})
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 201)
# check status
req = Request.blank('/sda1/p/a/c', method='HEAD')
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 204)
self.assertEqual(int(resp.headers[POLICY_INDEX]),
int(POLICIES.default))
# create object
obj_timestamp = ts.next()
req = Request.blank(
'/sda1/p/a/c/o', method='PUT', headers={
'X-Timestamp': obj_timestamp, 'X-Size': 1,
'X-Content-Type': 'text/plain', 'X-Etag': 'x'})
self._update_object_put_headers(req)
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
# check listing
req = Request.blank('/sda1/p/a/c', method='GET',
query_string='format=json')
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(int(resp.headers['X-Container-Object-Count']), 1)
self.assertEqual(int(resp.headers['X-Container-Bytes-Used']), 1)
listing_data = json.loads(resp.body)
self.assertEqual(1, len(listing_data))
for obj in listing_data:
self.assertEqual(obj['name'], 'o')
self.assertEqual(obj['bytes'], 1)
self.assertEqual(obj['hash'], 'x')
self.assertEqual(obj['content_type'], 'text/plain')
# send an update with an offset
offset_timestamp = Timestamp(obj_timestamp, offset=1).internal
req = Request.blank(
'/sda1/p/a/c/o', method='PUT', headers={
'X-Timestamp': offset_timestamp, 'X-Size': 2,
'X-Content-Type': 'text/html', 'X-Etag': 'y'})
self._update_object_put_headers(req)
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
# check updated listing
req = Request.blank('/sda1/p/a/c', method='GET',
query_string='format=json')
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(int(resp.headers['X-Container-Object-Count']), 1)
self.assertEqual(int(resp.headers['X-Container-Bytes-Used']), 2)
listing_data = json.loads(resp.body)
self.assertEqual(1, len(listing_data))
for obj in listing_data:
self.assertEqual(obj['name'], 'o')
self.assertEqual(obj['bytes'], 2)
self.assertEqual(obj['hash'], 'y')
self.assertEqual(obj['content_type'], 'text/html')
# now overwrite with a newer time
delete_timestamp = ts.next()
req = Request.blank(
'/sda1/p/a/c/o', method='DELETE', headers={
'X-Timestamp': delete_timestamp})
self._update_object_put_headers(req)
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
# check empty listing
req = Request.blank('/sda1/p/a/c', method='GET',
query_string='format=json')
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(int(resp.headers['X-Container-Object-Count']), 0)
self.assertEqual(int(resp.headers['X-Container-Bytes-Used']), 0)
listing_data = json.loads(resp.body)
self.assertEqual(0, len(listing_data))
# recreate with an offset
offset_timestamp = Timestamp(delete_timestamp, offset=1).internal
req = Request.blank(
'/sda1/p/a/c/o', method='PUT', headers={
'X-Timestamp': offset_timestamp, 'X-Size': 3,
'X-Content-Type': 'text/enriched', 'X-Etag': 'z'})
self._update_object_put_headers(req)
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201)
# check un-deleted listing
req = Request.blank('/sda1/p/a/c', method='GET',
query_string='format=json')
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(int(resp.headers['X-Container-Object-Count']), 1)
self.assertEqual(int(resp.headers['X-Container-Bytes-Used']), 3)
listing_data = json.loads(resp.body)
self.assertEqual(1, len(listing_data))
for obj in listing_data:
self.assertEqual(obj['name'], 'o')
self.assertEqual(obj['bytes'], 3)
self.assertEqual(obj['hash'], 'z')
self.assertEqual(obj['content_type'], 'text/enriched')
# delete offset with newer offset
delete_timestamp = Timestamp(offset_timestamp, offset=1).internal
req = Request.blank(
'/sda1/p/a/c/o', method='DELETE', headers={
'X-Timestamp': delete_timestamp})
self._update_object_put_headers(req)
resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 204)
# check empty listing
req = Request.blank('/sda1/p/a/c', method='GET',
query_string='format=json')
resp = req.get_response(self.controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(int(resp.headers['X-Container-Object-Count']), 0)
self.assertEqual(int(resp.headers['X-Container-Bytes-Used']), 0)
listing_data = json.loads(resp.body)
self.assertEqual(0, len(listing_data))
def test_DELETE_account_update(self): def test_DELETE_account_update(self):
bindsock = listen(('127.0.0.1', 0)) bindsock = listen(('127.0.0.1', 0))
@ -1365,11 +1487,11 @@ class TestContainerController(unittest.TestCase):
req = Request.blank( req = Request.blank(
'/sda1/p/a/c', '/sda1/p/a/c',
environ={'REQUEST_METHOD': 'DELETE'}, environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': '0000000002.00000', headers={'X-Timestamp': Timestamp(2).internal,
'X-Account-Host': '%s:%s' % bindsock.getsockname(), 'X-Account-Host': '%s:%s' % bindsock.getsockname(),
'X-Account-Partition': '123', 'X-Account-Partition': '123',
'X-Account-Device': 'sda1'}) 'X-Account-Device': 'sda1'})
event = spawn(accept, 204, '0000000002.00000') event = spawn(accept, 204, Timestamp(2).internal)
try: try:
with Timeout(3): with Timeout(3):
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
@ -1379,18 +1501,18 @@ class TestContainerController(unittest.TestCase):
if err: if err:
raise Exception(err) raise Exception(err)
req = Request.blank( req = Request.blank(
'/sda1/p/a/c', '/sda1/p/a/c', method='PUT', headers={
environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '2'}) 'X-Timestamp': Timestamp(2).internal})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201) self.assertEquals(resp.status_int, 201)
req = Request.blank( req = Request.blank(
'/sda1/p/a/c', '/sda1/p/a/c',
environ={'REQUEST_METHOD': 'DELETE'}, environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': '0000000003.00000', headers={'X-Timestamp': Timestamp(3).internal,
'X-Account-Host': '%s:%s' % bindsock.getsockname(), 'X-Account-Host': '%s:%s' % bindsock.getsockname(),
'X-Account-Partition': '123', 'X-Account-Partition': '123',
'X-Account-Device': 'sda1'}) 'X-Account-Device': 'sda1'})
event = spawn(accept, 404, '0000000003.00000') event = spawn(accept, 404, Timestamp(3).internal)
try: try:
with Timeout(3): with Timeout(3):
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
@ -1400,18 +1522,18 @@ class TestContainerController(unittest.TestCase):
if err: if err:
raise Exception(err) raise Exception(err)
req = Request.blank( req = Request.blank(
'/sda1/p/a/c', '/sda1/p/a/c', method='PUT', headers={
environ={'REQUEST_METHOD': 'PUT', 'HTTP_X_TIMESTAMP': '4'}) 'X-Timestamp': Timestamp(4).internal})
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 201) self.assertEquals(resp.status_int, 201)
req = Request.blank( req = Request.blank(
'/sda1/p/a/c', '/sda1/p/a/c',
environ={'REQUEST_METHOD': 'DELETE'}, environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': '0000000005.00000', headers={'X-Timestamp': Timestamp(5).internal,
'X-Account-Host': '%s:%s' % bindsock.getsockname(), 'X-Account-Host': '%s:%s' % bindsock.getsockname(),
'X-Account-Partition': '123', 'X-Account-Partition': '123',
'X-Account-Device': 'sda1'}) 'X-Account-Device': 'sda1'})
event = spawn(accept, 503, '0000000005.00000') event = spawn(accept, 503, Timestamp(5).internal)
got_exc = False got_exc = False
try: try:
with Timeout(3): with Timeout(3):
@ -1785,18 +1907,11 @@ class TestContainerController(unittest.TestCase):
self.assertEquals(result, [u'\u2603', 'text/plain;charset="utf-8"']) self.assertEquals(result, [u'\u2603', 'text/plain;charset="utf-8"'])
def test_GET_accept_not_valid(self): def test_GET_accept_not_valid(self):
req = Request.blank( req = Request.blank('/sda1/p/a/c', method='PUT', headers={
'/sda1/p/a/c', environ={'REQUEST_METHOD': 'PUT', 'X-Timestamp': Timestamp(0).internal})
'HTTP_X_TIMESTAMP': '0'}) resp = req.get_response(self.controller)
req.get_response(self.controller) self.assertEqual(resp.status_int, 201)
req = Request.blank('/sda1/p/a/c1', environ={'REQUEST_METHOD': 'PUT'}, req = Request.blank('/sda1/p/a/c', method='GET')
headers={'X-Put-Timestamp': '1',
'X-Delete-Timestamp': '0',
'X-Object-Count': '0',
'X-Bytes-Used': '0',
'X-Timestamp': normalize_timestamp(0)})
req.get_response(self.controller)
req = Request.blank('/sda1/p/a/c', environ={'REQUEST_METHOD': 'GET'})
req.accept = 'application/xml*' req.accept = 'application/xml*'
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 406) self.assertEquals(resp.status_int, 406)
@ -2080,13 +2195,12 @@ class TestContainerController(unittest.TestCase):
def test_params_format(self): def test_params_format(self):
req = Request.blank( req = Request.blank(
'/sda1/p/a/c', '/sda1/p/a/c', method='PUT',
headers={'X-Timestamp': normalize_timestamp(1)}, headers={'X-Timestamp': Timestamp(1).internal})
environ={'REQUEST_METHOD': 'PUT'})
req.get_response(self.controller) req.get_response(self.controller)
for format in ('xml', 'json'): for format in ('xml', 'json'):
req = Request.blank('/sda1/p/a/c?format=%s' % format, req = Request.blank('/sda1/p/a/c?format=%s' % format,
environ={'REQUEST_METHOD': 'GET'}) method='GET')
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 200) self.assertEquals(resp.status_int, 200)
@ -2105,9 +2219,8 @@ class TestContainerController(unittest.TestCase):
resp = req.get_response(self.controller) resp = req.get_response(self.controller)
self.assertEquals(resp.status_int, 412, self.assertEquals(resp.status_int, 412,
"%d on param delimiter" % (resp.status_int)) "%d on param delimiter" % (resp.status_int))
req = Request.blank('/sda1/p/a/c', req = Request.blank('/sda1/p/a/c', method='PUT',
headers={'X-Timestamp': normalize_timestamp(1)}, headers={'X-Timestamp': Timestamp(1).internal})
environ={'REQUEST_METHOD': 'PUT'})
req.get_response(self.controller) req.get_response(self.controller)
# Good UTF8 sequence, ignored for limit, doesn't affect other queries # Good UTF8 sequence, ignored for limit, doesn't affect other queries
for param in ('limit', 'marker', 'path', 'prefix', 'end_marker', for param in ('limit', 'marker', 'path', 'prefix', 'end_marker',
@ -2119,7 +2232,7 @@ class TestContainerController(unittest.TestCase):
"%d on param %s" % (resp.status_int, param)) "%d on param %s" % (resp.status_int, param))
def test_put_auto_create(self): def test_put_auto_create(self):
headers = {'x-timestamp': normalize_timestamp(1), headers = {'x-timestamp': Timestamp(1).internal,
'x-size': '0', 'x-size': '0',
'x-content-type': 'text/plain', 'x-content-type': 'text/plain',
'x-etag': 'd41d8cd98f00b204e9800998ecf8427e'} 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e'}
@ -2149,7 +2262,7 @@ class TestContainerController(unittest.TestCase):
self.assertEquals(resp.status_int, 404) self.assertEquals(resp.status_int, 404)
def test_delete_auto_create(self): def test_delete_auto_create(self):
headers = {'x-timestamp': normalize_timestamp(1)} headers = {'x-timestamp': Timestamp(1).internal}
req = Request.blank('/sda1/p/a/c/o', req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'}, environ={'REQUEST_METHOD': 'DELETE'},
@ -2177,7 +2290,7 @@ class TestContainerController(unittest.TestCase):
def test_content_type_on_HEAD(self): def test_content_type_on_HEAD(self):
Request.blank('/sda1/p/a/o', Request.blank('/sda1/p/a/o',
headers={'X-Timestamp': normalize_timestamp(1)}, headers={'X-Timestamp': Timestamp(1).internal},
environ={'REQUEST_METHOD': 'PUT'}).get_response( environ={'REQUEST_METHOD': 'PUT'}).get_response(
self.controller) self.controller)
@ -2267,7 +2380,7 @@ class TestContainerController(unittest.TestCase):
'x-bytes-used': 0, 'x-bytes-used': 0,
'x-delete-timestamp': '0', 'x-delete-timestamp': '0',
'x-object-count': 0, 'x-object-count': 0,
'x-put-timestamp': '0000012345.00000', 'x-put-timestamp': Timestamp(12345).internal,
POLICY_INDEX: '%s' % POLICIES.default.idx, POLICY_INDEX: '%s' % POLICIES.default.idx,
'referer': 'PUT http://localhost/sda1/p/a/c', 'referer': 'PUT http://localhost/sda1/p/a/c',
'user-agent': 'container-server %d' % os.getpid(), 'user-agent': 'container-server %d' % os.getpid(),
@ -2285,7 +2398,7 @@ class TestContainerController(unittest.TestCase):
'x-bytes-used': 0, 'x-bytes-used': 0,
'x-delete-timestamp': '0', 'x-delete-timestamp': '0',
'x-object-count': 0, 'x-object-count': 0,
'x-put-timestamp': '0000012345.00000', 'x-put-timestamp': Timestamp(12345).internal,
POLICY_INDEX: '%s' % POLICIES.default.idx, POLICY_INDEX: '%s' % POLICIES.default.idx,
'referer': 'PUT http://localhost/sda1/p/a/c', 'referer': 'PUT http://localhost/sda1/p/a/c',
'user-agent': 'container-server %d' % os.getpid(), 'user-agent': 'container-server %d' % os.getpid(),

View File

@ -38,7 +38,7 @@ from test.unit import (FakeLogger, mock as unit_mock, temptree,
from swift.obj import diskfile from swift.obj import diskfile
from swift.common import utils from swift.common import utils
from swift.common.utils import hash_path, mkdirs, normalize_timestamp from swift.common.utils import hash_path, mkdirs, Timestamp
from swift.common import ring from swift.common import ring
from swift.common.exceptions import DiskFileNotExist, DiskFileQuarantined, \ from swift.common.exceptions import DiskFileNotExist, DiskFileQuarantined, \
DiskFileDeviceUnavailable, DiskFileDeleted, DiskFileNotOpen, \ DiskFileDeviceUnavailable, DiskFileDeleted, DiskFileNotOpen, \
@ -254,7 +254,7 @@ class TestDiskFileModuleMethods(unittest.TestCase):
mkdirs(df._datadir) mkdirs(df._datadir)
f = open( f = open(
os.path.join(df._datadir, os.path.join(df._datadir,
normalize_timestamp(time() - 100) + '.ts'), Timestamp(time() - 100).internal + '.ts'),
'wb') 'wb')
f.write('1234567890') f.write('1234567890')
f.close() f.close()
@ -272,7 +272,7 @@ class TestDiskFileModuleMethods(unittest.TestCase):
mkdirs(df._datadir) mkdirs(df._datadir)
f = open( f = open(
os.path.join(df._datadir, os.path.join(df._datadir,
normalize_timestamp(time() - 100) + '.ts'), Timestamp(time() - 100).internal + '.ts'),
'wb') 'wb')
f.write('1234567890') f.write('1234567890')
f.close() f.close()
@ -304,7 +304,7 @@ class TestDiskFileModuleMethods(unittest.TestCase):
f = open( f = open(
os.path.join( os.path.join(
df._datadir, df._datadir,
normalize_timestamp(int(time()) - tdiff) + suff), Timestamp(int(time()) - tdiff).internal + suff),
'wb') 'wb')
f.write('1234567890') f.write('1234567890')
f.close() f.close()
@ -330,7 +330,7 @@ class TestDiskFileModuleMethods(unittest.TestCase):
f = open( f = open(
os.path.join( os.path.join(
df._datadir, df._datadir,
normalize_timestamp(int(time()) - tdiff) + suff), Timestamp(int(time()) - tdiff).internal + suff),
'wb') 'wb')
f.write('1234567890') f.write('1234567890')
f.close() f.close()
@ -412,7 +412,7 @@ class TestDiskFileModuleMethods(unittest.TestCase):
mkdirs(df._datadir) mkdirs(df._datadir)
with open( with open(
os.path.join(df._datadir, os.path.join(df._datadir,
normalize_timestamp(time()) + '.ts'), Timestamp(time()).internal + '.ts'),
'wb') as f: 'wb') as f:
f.write('1234567890') f.write('1234567890')
part = os.path.join(self.objects, '0') part = os.path.join(self.objects, '0')
@ -442,7 +442,7 @@ class TestDiskFileModuleMethods(unittest.TestCase):
mkdirs(df._datadir) mkdirs(df._datadir)
with open( with open(
os.path.join(df._datadir, os.path.join(df._datadir,
normalize_timestamp(time()) + '.ts'), Timestamp(time()).internal + '.ts'),
'wb') as f: 'wb') as f:
f.write('1234567890') f.write('1234567890')
part = os.path.join(self.objects, '0') part = os.path.join(self.objects, '0')
@ -462,7 +462,7 @@ class TestDiskFileModuleMethods(unittest.TestCase):
mkdirs(df._datadir) mkdirs(df._datadir)
with open( with open(
os.path.join(df._datadir, os.path.join(df._datadir,
normalize_timestamp(time()) + '.ts'), Timestamp(time()).internal + '.ts'),
'wb') as f: 'wb') as f:
f.write('1234567890') f.write('1234567890')
part = os.path.join(self.objects, '0') part = os.path.join(self.objects, '0')
@ -479,7 +479,7 @@ class TestDiskFileModuleMethods(unittest.TestCase):
mkdirs(df._datadir) mkdirs(df._datadir)
with open( with open(
os.path.join(df._datadir, os.path.join(df._datadir,
normalize_timestamp(time()) + '.ts'), Timestamp(time()).internal + '.ts'),
'wb') as f: 'wb') as f:
f.write('1234567890') f.write('1234567890')
part = os.path.join(self.objects, '0') part = os.path.join(self.objects, '0')
@ -516,7 +516,7 @@ class TestDiskFileModuleMethods(unittest.TestCase):
mkdirs(df._datadir) mkdirs(df._datadir)
with open( with open(
os.path.join(df._datadir, os.path.join(df._datadir,
normalize_timestamp(time()) + '.ts'), Timestamp(time()).internal + '.ts'),
'wb') as f: 'wb') as f:
f.write('1234567890') f.write('1234567890')
part = os.path.join(self.objects, '0') part = os.path.join(self.objects, '0')
@ -554,89 +554,89 @@ class TestDiskFileModuleMethods(unittest.TestCase):
def test_hash_cleanup_listdir_purge_data_newer_ts(self): def test_hash_cleanup_listdir_purge_data_newer_ts(self):
# purge .data if there's a newer .ts # purge .data if there's a newer .ts
file1 = normalize_timestamp(time()) + '.data' file1 = Timestamp(time()).internal + '.data'
file2 = normalize_timestamp(time() + 1) + '.ts' file2 = Timestamp(time() + 1).internal + '.ts'
file_list = [file1, file2] file_list = [file1, file2]
self.check_hash_cleanup_listdir(file_list, [file2]) self.check_hash_cleanup_listdir(file_list, [file2])
def test_hash_cleanup_listdir_purge_ts_newer_data(self): def test_hash_cleanup_listdir_purge_ts_newer_data(self):
# purge .ts if there's a newer .data # purge .ts if there's a newer .data
file1 = normalize_timestamp(time()) + '.ts' file1 = Timestamp(time()).internal + '.ts'
file2 = normalize_timestamp(time() + 1) + '.data' file2 = Timestamp(time() + 1).internal + '.data'
file_list = [file1, file2] file_list = [file1, file2]
self.check_hash_cleanup_listdir(file_list, [file2]) self.check_hash_cleanup_listdir(file_list, [file2])
def test_hash_cleanup_listdir_keep_meta_data_purge_ts(self): def test_hash_cleanup_listdir_keep_meta_data_purge_ts(self):
# keep .meta and .data if meta newer than data and purge .ts # keep .meta and .data if meta newer than data and purge .ts
file1 = normalize_timestamp(time()) + '.ts' file1 = Timestamp(time()).internal + '.ts'
file2 = normalize_timestamp(time() + 1) + '.data' file2 = Timestamp(time() + 1).internal + '.data'
file3 = normalize_timestamp(time() + 2) + '.meta' file3 = Timestamp(time() + 2).internal + '.meta'
file_list = [file1, file2, file3] file_list = [file1, file2, file3]
self.check_hash_cleanup_listdir(file_list, [file3, file2]) self.check_hash_cleanup_listdir(file_list, [file3, file2])
def test_hash_cleanup_listdir_keep_one_ts(self): def test_hash_cleanup_listdir_keep_one_ts(self):
# keep only latest of multiple .ts files # keep only latest of multiple .ts files
file1 = normalize_timestamp(time()) + '.ts' file1 = Timestamp(time()).internal + '.ts'
file2 = normalize_timestamp(time() + 1) + '.ts' file2 = Timestamp(time() + 1).internal + '.ts'
file3 = normalize_timestamp(time() + 2) + '.ts' file3 = Timestamp(time() + 2).internal + '.ts'
file_list = [file1, file2, file3] file_list = [file1, file2, file3]
self.check_hash_cleanup_listdir(file_list, [file3]) self.check_hash_cleanup_listdir(file_list, [file3])
def test_hash_cleanup_listdir_keep_one_data(self): def test_hash_cleanup_listdir_keep_one_data(self):
# keep only latest of multiple .data files # keep only latest of multiple .data files
file1 = normalize_timestamp(time()) + '.data' file1 = Timestamp(time()).internal + '.data'
file2 = normalize_timestamp(time() + 1) + '.data' file2 = Timestamp(time() + 1).internal + '.data'
file3 = normalize_timestamp(time() + 2) + '.data' file3 = Timestamp(time() + 2).internal + '.data'
file_list = [file1, file2, file3] file_list = [file1, file2, file3]
self.check_hash_cleanup_listdir(file_list, [file3]) self.check_hash_cleanup_listdir(file_list, [file3])
def test_hash_cleanup_listdir_keep_one_meta(self): def test_hash_cleanup_listdir_keep_one_meta(self):
# keep only latest of multiple .meta files # keep only latest of multiple .meta files
file1 = normalize_timestamp(time()) + '.data' file1 = Timestamp(time()).internal + '.data'
file2 = normalize_timestamp(time() + 1) + '.meta' file2 = Timestamp(time() + 1).internal + '.meta'
file3 = normalize_timestamp(time() + 2) + '.meta' file3 = Timestamp(time() + 2).internal + '.meta'
file_list = [file1, file2, file3] file_list = [file1, file2, file3]
self.check_hash_cleanup_listdir(file_list, [file3, file1]) self.check_hash_cleanup_listdir(file_list, [file3, file1])
def test_hash_cleanup_listdir_ignore_orphaned_ts(self): def test_hash_cleanup_listdir_ignore_orphaned_ts(self):
# A more recent orphaned .meta file will prevent old .ts files # A more recent orphaned .meta file will prevent old .ts files
# from being cleaned up otherwise # from being cleaned up otherwise
file1 = normalize_timestamp(time()) + '.ts' file1 = Timestamp(time()).internal + '.ts'
file2 = normalize_timestamp(time() + 1) + '.ts' file2 = Timestamp(time() + 1).internal + '.ts'
file3 = normalize_timestamp(time() + 2) + '.meta' file3 = Timestamp(time() + 2).internal + '.meta'
file_list = [file1, file2, file3] file_list = [file1, file2, file3]
self.check_hash_cleanup_listdir(file_list, [file3, file2]) self.check_hash_cleanup_listdir(file_list, [file3, file2])
def test_hash_cleanup_listdir_purge_old_data_only(self): def test_hash_cleanup_listdir_purge_old_data_only(self):
# Oldest .data will be purge, .meta and .ts won't be touched # Oldest .data will be purge, .meta and .ts won't be touched
file1 = normalize_timestamp(time()) + '.data' file1 = Timestamp(time()).internal + '.data'
file2 = normalize_timestamp(time() + 1) + '.ts' file2 = Timestamp(time() + 1).internal + '.ts'
file3 = normalize_timestamp(time() + 2) + '.meta' file3 = Timestamp(time() + 2).internal + '.meta'
file_list = [file1, file2, file3] file_list = [file1, file2, file3]
self.check_hash_cleanup_listdir(file_list, [file3, file2]) self.check_hash_cleanup_listdir(file_list, [file3, file2])
def test_hash_cleanup_listdir_purge_old_ts(self): def test_hash_cleanup_listdir_purge_old_ts(self):
# A single old .ts file will be removed # A single old .ts file will be removed
file1 = normalize_timestamp(time() - (diskfile.ONE_WEEK + 1)) + '.ts' file1 = Timestamp(time() - (diskfile.ONE_WEEK + 1)).internal + '.ts'
file_list = [file1] file_list = [file1]
self.check_hash_cleanup_listdir(file_list, []) self.check_hash_cleanup_listdir(file_list, [])
def test_hash_cleanup_listdir_meta_keeps_old_ts(self): def test_hash_cleanup_listdir_meta_keeps_old_ts(self):
# An orphaned .meta will not clean up a very old .ts # An orphaned .meta will not clean up a very old .ts
file1 = normalize_timestamp(time() - (diskfile.ONE_WEEK + 1)) + '.ts' file1 = Timestamp(time() - (diskfile.ONE_WEEK + 1)).internal + '.ts'
file2 = normalize_timestamp(time() + 2) + '.meta' file2 = Timestamp(time() + 2).internal + '.meta'
file_list = [file1, file2] file_list = [file1, file2]
self.check_hash_cleanup_listdir(file_list, [file2, file1]) self.check_hash_cleanup_listdir(file_list, [file2, file1])
def test_hash_cleanup_listdir_keep_single_old_data(self): def test_hash_cleanup_listdir_keep_single_old_data(self):
# A single old .data file will not be removed # A single old .data file will not be removed
file1 = normalize_timestamp(time() - (diskfile.ONE_WEEK + 1)) + '.data' file1 = Timestamp(time() - (diskfile.ONE_WEEK + 1)).internal + '.data'
file_list = [file1] file_list = [file1]
self.check_hash_cleanup_listdir(file_list, [file1]) self.check_hash_cleanup_listdir(file_list, [file1])
def test_hash_cleanup_listdir_keep_single_old_meta(self): def test_hash_cleanup_listdir_keep_single_old_meta(self):
# A single old .meta file will not be removed # A single old .meta file will not be removed
file1 = normalize_timestamp(time() - (diskfile.ONE_WEEK + 1)) + '.meta' file1 = Timestamp(time() - (diskfile.ONE_WEEK + 1)).internal + '.meta'
file_list = [file1] file_list = [file1]
self.check_hash_cleanup_listdir(file_list, [file1]) self.check_hash_cleanup_listdir(file_list, [file1])
@ -865,7 +865,7 @@ class TestDiskFileManager(unittest.TestCase):
def test_pickle_async_update(self): def test_pickle_async_update(self):
self.df_mgr.logger.increment = mock.MagicMock() self.df_mgr.logger.increment = mock.MagicMock()
ts = normalize_timestamp(10000.0) ts = Timestamp(10000.0).internal
with mock.patch('swift.obj.diskfile.write_pickle') as wp: with mock.patch('swift.obj.diskfile.write_pickle') as wp:
self.df_mgr.pickle_async_update(self.existing_device1, self.df_mgr.pickle_async_update(self.existing_device1,
'a', 'c', 'o', 'a', 'c', 'o',
@ -981,11 +981,11 @@ class TestDiskFile(unittest.TestCase):
mkdirs(df._datadir) mkdirs(df._datadir)
if timestamp is None: if timestamp is None:
timestamp = time() timestamp = time()
timestamp = normalize_timestamp(timestamp) timestamp = Timestamp(timestamp).internal
if not metadata: if not metadata:
metadata = {} metadata = {}
if 'X-Timestamp' not in metadata: if 'X-Timestamp' not in metadata:
metadata['X-Timestamp'] = normalize_timestamp(timestamp) metadata['X-Timestamp'] = Timestamp(timestamp).internal
if 'ETag' not in metadata: if 'ETag' not in metadata:
etag = md5() etag = md5()
etag.update(data) etag.update(data)
@ -1038,13 +1038,13 @@ class TestDiskFile(unittest.TestCase):
def test_get_metadata(self): def test_get_metadata(self):
df = self._create_test_file('1234567890', timestamp=42) df = self._create_test_file('1234567890', timestamp=42)
md = df.get_metadata() md = df.get_metadata()
self.assertEqual(md['X-Timestamp'], normalize_timestamp(42)) self.assertEqual(md['X-Timestamp'], Timestamp(42).internal)
def test_read_metadata(self): def test_read_metadata(self):
self._create_test_file('1234567890', timestamp=42) self._create_test_file('1234567890', timestamp=42)
df = self._simple_get_diskfile() df = self._simple_get_diskfile()
md = df.read_metadata() md = df.read_metadata()
self.assertEqual(md['X-Timestamp'], normalize_timestamp(42)) self.assertEqual(md['X-Timestamp'], Timestamp(42).internal)
def test_get_metadata_not_opened(self): def test_get_metadata_not_opened(self):
df = self._simple_get_diskfile() df = self._simple_get_diskfile()
@ -1069,7 +1069,7 @@ class TestDiskFile(unittest.TestCase):
self.assertEquals('1024', df._metadata['Content-Length']) self.assertEquals('1024', df._metadata['Content-Length'])
# write some new metadata (fast POST, don't send orig meta, ts 42) # write some new metadata (fast POST, don't send orig meta, ts 42)
df = self._simple_get_diskfile() df = self._simple_get_diskfile()
df.write_metadata({'X-Timestamp': normalize_timestamp(42), df.write_metadata({'X-Timestamp': Timestamp(42).internal,
'X-Object-Meta-Key2': 'Value2'}) 'X-Object-Meta-Key2': 'Value2'})
df = self._simple_get_diskfile() df = self._simple_get_diskfile()
with df.open(): with df.open():
@ -1240,7 +1240,7 @@ class TestDiskFile(unittest.TestCase):
if ts: if ts:
timestamp = ts timestamp = ts
else: else:
timestamp = normalize_timestamp(time()) timestamp = Timestamp(time()).internal
if prealloc: if prealloc:
prealloc_size = fsize prealloc_size = fsize
else: else:
@ -1578,7 +1578,7 @@ class TestDiskFile(unittest.TestCase):
def test_write_metadata(self): def test_write_metadata(self):
df = self._create_test_file('1234567890') df = self._create_test_file('1234567890')
timestamp = normalize_timestamp(time()) timestamp = Timestamp(time()).internal
metadata = {'X-Timestamp': timestamp, 'X-Object-Meta-test': 'data'} metadata = {'X-Timestamp': timestamp, 'X-Object-Meta-test': 'data'}
df.write_metadata(metadata) df.write_metadata(metadata)
dl = os.listdir(df._datadir) dl = os.listdir(df._datadir)
@ -1590,7 +1590,7 @@ class TestDiskFile(unittest.TestCase):
df = self._get_open_disk_file() df = self._get_open_disk_file()
ts = time() ts = time()
df.delete(ts) df.delete(ts)
exp_name = '%s.ts' % str(normalize_timestamp(ts)) exp_name = '%s.ts' % Timestamp(ts).internal
dl = os.listdir(df._datadir) dl = os.listdir(df._datadir)
self.assertEquals(len(dl), 1) self.assertEquals(len(dl), 1)
self.assertTrue(exp_name in set(dl)) self.assertTrue(exp_name in set(dl))
@ -1599,7 +1599,7 @@ class TestDiskFile(unittest.TestCase):
df = self._get_open_disk_file() df = self._get_open_disk_file()
ts = time() ts = time()
df.delete(ts) df.delete(ts)
exp_name = '%s.ts' % str(normalize_timestamp(ts)) exp_name = '%s.ts' % str(Timestamp(ts).internal)
dl = os.listdir(df._datadir) dl = os.listdir(df._datadir)
self.assertEquals(len(dl), 1) self.assertEquals(len(dl), 1)
self.assertTrue(exp_name in set(dl)) self.assertTrue(exp_name in set(dl))
@ -1610,7 +1610,7 @@ class TestDiskFile(unittest.TestCase):
df = self._get_open_disk_file() df = self._get_open_disk_file()
ts = time() ts = time()
df.delete(ts) df.delete(ts)
exp_name = '%s.ts' % str(normalize_timestamp(ts)) exp_name = '%s.ts' % str(Timestamp(ts).internal)
dl = os.listdir(df._datadir) dl = os.listdir(df._datadir)
self.assertEquals(len(dl), 1) self.assertEquals(len(dl), 1)
self.assertTrue(exp_name in set(dl)) self.assertTrue(exp_name in set(dl))
@ -1685,7 +1685,7 @@ class TestDiskFile(unittest.TestCase):
try: try:
df.open() df.open()
except DiskFileDeleted as d: except DiskFileDeleted as d:
self.assertEquals(d.timestamp, normalize_timestamp(10)) self.assertEquals(d.timestamp, Timestamp(10).internal)
else: else:
self.fail("Expected DiskFileDeleted exception") self.fail("Expected DiskFileDeleted exception")
@ -1701,7 +1701,7 @@ class TestDiskFile(unittest.TestCase):
try: try:
df.open() df.open()
except DiskFileDeleted as d: except DiskFileDeleted as d:
self.assertEquals(d.timestamp, normalize_timestamp(8)) self.assertEquals(d.timestamp, Timestamp(8).internal)
else: else:
self.fail("Expected DiskFileDeleted exception") self.fail("Expected DiskFileDeleted exception")
@ -1717,7 +1717,7 @@ class TestDiskFile(unittest.TestCase):
with df.open(): with df.open():
self.assertTrue('X-Timestamp' in df._metadata) self.assertTrue('X-Timestamp' in df._metadata)
self.assertEquals(df._metadata['X-Timestamp'], self.assertEquals(df._metadata['X-Timestamp'],
normalize_timestamp(10)) Timestamp(10).internal)
self.assertTrue('deleted' not in df._metadata) self.assertTrue('deleted' not in df._metadata)
def test_ondisk_search_loop_data_meta_ts(self): def test_ondisk_search_loop_data_meta_ts(self):
@ -1732,7 +1732,7 @@ class TestDiskFile(unittest.TestCase):
with df.open(): with df.open():
self.assertTrue('X-Timestamp' in df._metadata) self.assertTrue('X-Timestamp' in df._metadata)
self.assertEquals(df._metadata['X-Timestamp'], self.assertEquals(df._metadata['X-Timestamp'],
normalize_timestamp(10)) Timestamp(10).internal)
self.assertTrue('deleted' not in df._metadata) self.assertTrue('deleted' not in df._metadata)
def test_ondisk_search_loop_wayward_files_ignored(self): def test_ondisk_search_loop_wayward_files_ignored(self):
@ -1748,7 +1748,7 @@ class TestDiskFile(unittest.TestCase):
with df.open(): with df.open():
self.assertTrue('X-Timestamp' in df._metadata) self.assertTrue('X-Timestamp' in df._metadata)
self.assertEquals(df._metadata['X-Timestamp'], self.assertEquals(df._metadata['X-Timestamp'],
normalize_timestamp(10)) Timestamp(10).internal)
self.assertTrue('deleted' not in df._metadata) self.assertTrue('deleted' not in df._metadata)
def test_ondisk_search_loop_listdir_error(self): def test_ondisk_search_loop_listdir_error(self):
@ -1995,8 +1995,8 @@ class TestDiskFile(unittest.TestCase):
suffixes=['456'])), []) suffixes=['456'])), [])
def test_yield_hashes(self): def test_yield_hashes(self):
fresh_ts = normalize_timestamp(time() - 10) fresh_ts = Timestamp(time() - 10).internal
fresher_ts = normalize_timestamp(time() - 1) fresher_ts = Timestamp(time() - 1).internal
def _listdir(path): def _listdir(path):
if path.endswith('/dev/objects/9'): if path.endswith('/dev/objects/9'):
@ -2037,8 +2037,8 @@ class TestDiskFile(unittest.TestCase):
'9373a92d072897b136b3fc06595b7456', fresher_ts)]) '9373a92d072897b136b3fc06595b7456', fresher_ts)])
def test_yield_hashes_suffixes(self): def test_yield_hashes_suffixes(self):
fresh_ts = normalize_timestamp(time() - 10) fresh_ts = Timestamp(time() - 10).internal
fresher_ts = normalize_timestamp(time() - 1) fresher_ts = Timestamp(time() - 1).internal
def _listdir(path): def _listdir(path):
if path.endswith('/dev/objects/9'): if path.endswith('/dev/objects/9'):
@ -2095,7 +2095,7 @@ class TestDiskFile(unittest.TestCase):
df = self._get_open_disk_file() df = self._get_open_disk_file()
ts = time() ts = time()
df.delete(ts) df.delete(ts)
exp_name = '%s.ts' % str(normalize_timestamp(ts)) exp_name = '%s.ts' % str(Timestamp(ts).internal)
dl = os.listdir(df._datadir) dl = os.listdir(df._datadir)
self.assertEquals(len(dl), 1) self.assertEquals(len(dl), 1)
self.assertTrue(exp_name in set(dl)) self.assertTrue(exp_name in set(dl))
@ -2127,7 +2127,7 @@ class TestDiskFile(unittest.TestCase):
df = self._get_open_disk_file() df = self._get_open_disk_file()
ts = time() ts = time()
df.delete(ts) df.delete(ts)
exp_name = '%s.ts' % str(normalize_timestamp(ts)) exp_name = '%s.ts' % str(Timestamp(ts).internal)
dl = os.listdir(df._datadir) dl = os.listdir(df._datadir)
self.assertEquals(len(dl), 1) self.assertEquals(len(dl), 1)
self.assertTrue(exp_name in set(dl)) self.assertTrue(exp_name in set(dl))
@ -2159,7 +2159,7 @@ class TestDiskFile(unittest.TestCase):
df.delete(ts) df.delete(ts)
except OSError: except OSError:
self.fail("OSError raised when it should have been swallowed") self.fail("OSError raised when it should have been swallowed")
exp_name = '%s.ts' % str(normalize_timestamp(ts)) exp_name = '%s.ts' % str(Timestamp(ts).internal)
dl = os.listdir(df._datadir) dl = os.listdir(df._datadir)
self.assertEquals(len(dl), 2) self.assertEquals(len(dl), 2)
self.assertTrue(exp_name in set(dl)) self.assertTrue(exp_name in set(dl))

View File

@ -554,11 +554,11 @@ class TestObjectController(unittest.TestCase):
self.testdir, 'sda1', self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(0), storage_directory(diskfile.get_data_dir(0),
'p', hash_path('a', 'c', 'o')), 'p', hash_path('a', 'c', 'o')),
timestamp + '.data') utils.Timestamp(timestamp).internal + '.data')
self.assert_(os.path.isfile(objfile)) self.assert_(os.path.isfile(objfile))
self.assertEquals(open(objfile).read(), 'VERIFY') self.assertEquals(open(objfile).read(), 'VERIFY')
self.assertEquals(diskfile.read_metadata(objfile), self.assertEquals(diskfile.read_metadata(objfile),
{'X-Timestamp': timestamp, {'X-Timestamp': utils.Timestamp(timestamp).internal,
'Content-Length': '6', 'Content-Length': '6',
'ETag': '0b4c12d7e0a73840c1c4f148fda3b037', 'ETag': '0b4c12d7e0a73840c1c4f148fda3b037',
'Content-Type': 'application/octet-stream', 'Content-Type': 'application/octet-stream',
@ -587,11 +587,11 @@ class TestObjectController(unittest.TestCase):
self.testdir, 'sda1', self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(0), 'p', storage_directory(diskfile.get_data_dir(0), 'p',
hash_path('a', 'c', 'o')), hash_path('a', 'c', 'o')),
timestamp + '.data') utils.Timestamp(timestamp).internal + '.data')
self.assert_(os.path.isfile(objfile)) self.assert_(os.path.isfile(objfile))
self.assertEquals(open(objfile).read(), 'VERIFY TWO') self.assertEquals(open(objfile).read(), 'VERIFY TWO')
self.assertEquals(diskfile.read_metadata(objfile), self.assertEquals(diskfile.read_metadata(objfile),
{'X-Timestamp': timestamp, {'X-Timestamp': utils.Timestamp(timestamp).internal,
'Content-Length': '10', 'Content-Length': '10',
'ETag': 'b381a4c5dab1eaa1eb9711fa647cd039', 'ETag': 'b381a4c5dab1eaa1eb9711fa647cd039',
'Content-Type': 'text/plain', 'Content-Type': 'text/plain',
@ -622,11 +622,11 @@ class TestObjectController(unittest.TestCase):
self.testdir, 'sda1', self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(0), 'p', storage_directory(diskfile.get_data_dir(0), 'p',
hash_path('a', 'c', 'o')), hash_path('a', 'c', 'o')),
timestamp + '.data') utils.Timestamp(timestamp).internal + '.data')
self.assertTrue(os.path.isfile(objfile)) self.assertTrue(os.path.isfile(objfile))
self.assertEqual(open(objfile).read(), 'VERIFY TWO') self.assertEqual(open(objfile).read(), 'VERIFY TWO')
self.assertEqual(diskfile.read_metadata(objfile), self.assertEqual(diskfile.read_metadata(objfile),
{'X-Timestamp': timestamp, {'X-Timestamp': utils.Timestamp(timestamp).internal,
'Content-Length': '10', 'Content-Length': '10',
'ETag': 'b381a4c5dab1eaa1eb9711fa647cd039', 'ETag': 'b381a4c5dab1eaa1eb9711fa647cd039',
'Content-Type': 'text/plain', 'Content-Type': 'text/plain',
@ -696,11 +696,11 @@ class TestObjectController(unittest.TestCase):
self.testdir, 'sda1', self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(0), 'p', storage_directory(diskfile.get_data_dir(0), 'p',
hash_path('a', 'c', 'o')), hash_path('a', 'c', 'o')),
timestamp + '.data') utils.Timestamp(timestamp).internal + '.data')
self.assert_(os.path.isfile(objfile)) self.assert_(os.path.isfile(objfile))
self.assertEquals(open(objfile).read(), 'VERIFY THREE') self.assertEquals(open(objfile).read(), 'VERIFY THREE')
self.assertEquals(diskfile.read_metadata(objfile), self.assertEquals(diskfile.read_metadata(objfile),
{'X-Timestamp': timestamp, {'X-Timestamp': utils.Timestamp(timestamp).internal,
'Content-Length': '12', 'Content-Length': '12',
'ETag': 'b114ab7b90d9ccac4bd5d99cc7ebb568', 'ETag': 'b114ab7b90d9ccac4bd5d99cc7ebb568',
'Content-Type': 'text/plain', 'Content-Type': 'text/plain',
@ -843,7 +843,7 @@ class TestObjectController(unittest.TestCase):
self.testdir, 'sda1', self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(0), 'p', storage_directory(diskfile.get_data_dir(0), 'p',
hash_path('a', 'c', 'o')), hash_path('a', 'c', 'o')),
timestamp + '.data') utils.Timestamp(timestamp).internal + '.data')
os.unlink(objfile) os.unlink(objfile)
req = Request.blank('/sda1/p/a/c/o', req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'}) environ={'REQUEST_METHOD': 'HEAD'})
@ -873,7 +873,8 @@ class TestObjectController(unittest.TestCase):
environ={'REQUEST_METHOD': 'HEAD'}) environ={'REQUEST_METHOD': 'HEAD'})
resp = req.get_response(self.object_controller) resp = req.get_response(self.object_controller)
self.assertEquals(resp.status_int, 404) self.assertEquals(resp.status_int, 404)
self.assertEquals(resp.headers['X-Backend-Timestamp'], timestamp) self.assertEquals(resp.headers['X-Backend-Timestamp'],
utils.Timestamp(timestamp).internal)
def test_HEAD_quarantine_zbyte(self): def test_HEAD_quarantine_zbyte(self):
# Test swift.obj.server.ObjectController.GET # Test swift.obj.server.ObjectController.GET
@ -969,7 +970,7 @@ class TestObjectController(unittest.TestCase):
self.testdir, 'sda1', self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(0), 'p', storage_directory(diskfile.get_data_dir(0), 'p',
hash_path('a', 'c', 'o')), hash_path('a', 'c', 'o')),
timestamp + '.data') utils.Timestamp(timestamp).internal + '.data')
os.unlink(objfile) os.unlink(objfile)
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller) resp = req.get_response(self.object_controller)
@ -997,7 +998,8 @@ class TestObjectController(unittest.TestCase):
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'}) req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'GET'})
resp = req.get_response(self.object_controller) resp = req.get_response(self.object_controller)
self.assertEquals(resp.status_int, 404) self.assertEquals(resp.status_int, 404)
self.assertEquals(resp.headers['X-Backend-Timestamp'], timestamp) self.assertEquals(resp.headers['X-Backend-Timestamp'],
utils.Timestamp(timestamp).internal)
def test_GET_if_match(self): def test_GET_if_match(self):
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
@ -1581,7 +1583,7 @@ class TestObjectController(unittest.TestCase):
self.testdir, 'sda1', self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(0), 'p', storage_directory(diskfile.get_data_dir(0), 'p',
hash_path('a', 'c', 'o')), hash_path('a', 'c', 'o')),
timestamp + '.ts') utils.Timestamp(timestamp).internal + '.ts')
self.assertTrue(os.path.isfile(ts_1000_file)) self.assertTrue(os.path.isfile(ts_1000_file))
# There should now be a 1000 ts file. # There should now be a 1000 ts file.
self.assertEquals(len(os.listdir(os.path.dirname(ts_1000_file))), 1) self.assertEquals(len(os.listdir(os.path.dirname(ts_1000_file))), 1)
@ -1597,7 +1599,7 @@ class TestObjectController(unittest.TestCase):
self.testdir, 'sda1', self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(0), 'p', storage_directory(diskfile.get_data_dir(0), 'p',
hash_path('a', 'c', 'o')), hash_path('a', 'c', 'o')),
timestamp + '.ts') utils.Timestamp(timestamp).internal + '.ts')
self.assertFalse(os.path.isfile(ts_999_file)) self.assertFalse(os.path.isfile(ts_999_file))
self.assertTrue(os.path.isfile(ts_1000_file)) self.assertTrue(os.path.isfile(ts_1000_file))
self.assertEquals(len(os.listdir(os.path.dirname(ts_1000_file))), 1) self.assertEquals(len(os.listdir(os.path.dirname(ts_1000_file))), 1)
@ -1617,7 +1619,7 @@ class TestObjectController(unittest.TestCase):
self.testdir, 'sda1', self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(0), 'p', storage_directory(diskfile.get_data_dir(0), 'p',
hash_path('a', 'c', 'o')), hash_path('a', 'c', 'o')),
timestamp + '.data') utils.Timestamp(timestamp).internal + '.data')
self.assertTrue(os.path.isfile(data_1002_file)) self.assertTrue(os.path.isfile(data_1002_file))
self.assertEquals(len(os.listdir(os.path.dirname(data_1002_file))), 1) self.assertEquals(len(os.listdir(os.path.dirname(data_1002_file))), 1)
@ -1632,7 +1634,7 @@ class TestObjectController(unittest.TestCase):
self.testdir, 'sda1', self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(0), 'p', storage_directory(diskfile.get_data_dir(0), 'p',
hash_path('a', 'c', 'o')), hash_path('a', 'c', 'o')),
timestamp + '.ts') utils.Timestamp(timestamp).internal + '.ts')
self.assertFalse(os.path.isfile(ts_1001_file)) self.assertFalse(os.path.isfile(ts_1001_file))
self.assertTrue(os.path.isfile(data_1002_file)) self.assertTrue(os.path.isfile(data_1002_file))
self.assertEquals(len(os.listdir(os.path.dirname(ts_1001_file))), 1) self.assertEquals(len(os.listdir(os.path.dirname(ts_1001_file))), 1)
@ -1647,7 +1649,7 @@ class TestObjectController(unittest.TestCase):
self.testdir, 'sda1', self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(0), 'p', storage_directory(diskfile.get_data_dir(0), 'p',
hash_path('a', 'c', 'o')), hash_path('a', 'c', 'o')),
timestamp + '.ts') utils.Timestamp(timestamp).internal + '.ts')
self.assertTrue(os.path.isfile(ts_1003_file)) self.assertTrue(os.path.isfile(ts_1003_file))
self.assertEquals(len(os.listdir(os.path.dirname(ts_1003_file))), 1) self.assertEquals(len(os.listdir(os.path.dirname(ts_1003_file))), 1)
@ -1655,10 +1657,11 @@ class TestObjectController(unittest.TestCase):
# Test swift.obj.server.ObjectController.DELETE and container # Test swift.obj.server.ObjectController.DELETE and container
# updates, making sure container update is called in the correct # updates, making sure container update is called in the correct
# state. # state.
timestamp = normalize_timestamp(time()) start = time()
timestamp = utils.Timestamp(start)
req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'}, req = Request.blank('/sda1/p/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={ headers={
'X-Timestamp': timestamp, 'X-Timestamp': timestamp.internal,
'Content-Type': 'application/octet-stream', 'Content-Type': 'application/octet-stream',
'Content-Length': '4', 'Content-Length': '4',
}) })
@ -1676,17 +1679,17 @@ class TestObjectController(unittest.TestCase):
try: try:
# The following request should return 409 (HTTP Conflict). A # The following request should return 409 (HTTP Conflict). A
# tombstone file should not have been created with this timestamp. # tombstone file should not have been created with this timestamp.
timestamp = normalize_timestamp(float(timestamp) - 1) timestamp = utils.Timestamp(start - 0.00001)
req = Request.blank('/sda1/p/a/c/o', req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'}, environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': timestamp}) headers={'X-Timestamp': timestamp.internal})
resp = req.get_response(self.object_controller) resp = req.get_response(self.object_controller)
self.assertEquals(resp.status_int, 409) self.assertEquals(resp.status_int, 409)
objfile = os.path.join( objfile = os.path.join(
self.testdir, 'sda1', self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(0), 'p', storage_directory(diskfile.get_data_dir(0), 'p',
hash_path('a', 'c', 'o')), hash_path('a', 'c', 'o')),
timestamp + '.ts') utils.Timestamp(timestamp).internal + '.ts')
self.assertFalse(os.path.isfile(objfile)) self.assertFalse(os.path.isfile(objfile))
self.assertEquals(len(os.listdir(os.path.dirname(objfile))), 1) self.assertEquals(len(os.listdir(os.path.dirname(objfile))), 1)
self.assertEquals(0, calls_made[0]) self.assertEquals(0, calls_made[0])
@ -1695,18 +1698,17 @@ class TestObjectController(unittest.TestCase):
# be truly deleted (container update is performed) because this # be truly deleted (container update is performed) because this
# timestamp is newer. A tombstone file should have been created # timestamp is newer. A tombstone file should have been created
# with this timestamp. # with this timestamp.
sleep(.00001) timestamp = utils.Timestamp(start + 0.00001)
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/o', req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'}, environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': timestamp}) headers={'X-Timestamp': timestamp.internal})
resp = req.get_response(self.object_controller) resp = req.get_response(self.object_controller)
self.assertEquals(resp.status_int, 204) self.assertEquals(resp.status_int, 204)
objfile = os.path.join( objfile = os.path.join(
self.testdir, 'sda1', self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(0), 'p', storage_directory(diskfile.get_data_dir(0), 'p',
hash_path('a', 'c', 'o')), hash_path('a', 'c', 'o')),
timestamp + '.ts') utils.Timestamp(timestamp).internal + '.ts')
self.assert_(os.path.isfile(objfile)) self.assert_(os.path.isfile(objfile))
self.assertEquals(1, calls_made[0]) self.assertEquals(1, calls_made[0])
self.assertEquals(len(os.listdir(os.path.dirname(objfile))), 1) self.assertEquals(len(os.listdir(os.path.dirname(objfile))), 1)
@ -1715,18 +1717,17 @@ class TestObjectController(unittest.TestCase):
# already have been deleted, but it should have also performed a # already have been deleted, but it should have also performed a
# container update because the timestamp is newer, and a tombstone # container update because the timestamp is newer, and a tombstone
# file should also exist with this timestamp. # file should also exist with this timestamp.
sleep(.00001) timestamp = utils.Timestamp(start + 0.00002)
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/o', req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'}, environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': timestamp}) headers={'X-Timestamp': timestamp.internal})
resp = req.get_response(self.object_controller) resp = req.get_response(self.object_controller)
self.assertEquals(resp.status_int, 404) self.assertEquals(resp.status_int, 404)
objfile = os.path.join( objfile = os.path.join(
self.testdir, 'sda1', self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(0), 'p', storage_directory(diskfile.get_data_dir(0), 'p',
hash_path('a', 'c', 'o')), hash_path('a', 'c', 'o')),
timestamp + '.ts') utils.Timestamp(timestamp).internal + '.ts')
self.assert_(os.path.isfile(objfile)) self.assert_(os.path.isfile(objfile))
self.assertEquals(2, calls_made[0]) self.assertEquals(2, calls_made[0])
self.assertEquals(len(os.listdir(os.path.dirname(objfile))), 1) self.assertEquals(len(os.listdir(os.path.dirname(objfile))), 1)
@ -1735,23 +1736,209 @@ class TestObjectController(unittest.TestCase):
# already have been deleted, and it should not have performed a # already have been deleted, and it should not have performed a
# container update because the timestamp is older, or created a # container update because the timestamp is older, or created a
# tombstone file with this timestamp. # tombstone file with this timestamp.
timestamp = normalize_timestamp(float(timestamp) - 1) timestamp = utils.Timestamp(start + 0.00001)
req = Request.blank('/sda1/p/a/c/o', req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'}, environ={'REQUEST_METHOD': 'DELETE'},
headers={'X-Timestamp': timestamp}) headers={'X-Timestamp': timestamp.internal})
resp = req.get_response(self.object_controller) resp = req.get_response(self.object_controller)
self.assertEquals(resp.status_int, 404) self.assertEquals(resp.status_int, 404)
objfile = os.path.join( objfile = os.path.join(
self.testdir, 'sda1', self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(0), 'p', storage_directory(diskfile.get_data_dir(0), 'p',
hash_path('a', 'c', 'o')), hash_path('a', 'c', 'o')),
timestamp + '.ts') utils.Timestamp(timestamp).internal + '.ts')
self.assertFalse(os.path.isfile(objfile)) self.assertFalse(os.path.isfile(objfile))
self.assertEquals(2, calls_made[0]) self.assertEquals(2, calls_made[0])
self.assertEquals(len(os.listdir(os.path.dirname(objfile))), 1) self.assertEquals(len(os.listdir(os.path.dirname(objfile))), 1)
finally: finally:
self.object_controller.container_update = orig_cu self.object_controller.container_update = orig_cu
def test_object_update_with_offset(self):
ts = (utils.Timestamp(t).internal for t in
itertools.count(int(time())))
container_updates = []
def capture_updates(ip, port, method, path, headers, *args, **kwargs):
container_updates.append((ip, port, method, path, headers))
# create a new object
create_timestamp = ts.next()
req = Request.blank('/sda1/p/a/c/o', method='PUT', body='test1',
headers={'X-Timestamp': create_timestamp,
'X-Container-Host': '10.0.0.1:8080',
'X-Container-Device': 'sda1',
'X-Container-Partition': 'p',
'Content-Type': 'text/plain'})
with mocked_http_conn(
200, give_connect=capture_updates) as fake_conn:
resp = req.get_response(self.object_controller)
self.assertRaises(StopIteration, fake_conn.code_iter.next)
self.assertEqual(resp.status_int, 201)
self.assertEquals(1, len(container_updates))
for update in container_updates:
ip, port, method, path, headers = update
self.assertEqual(ip, '10.0.0.1')
self.assertEqual(port, '8080')
self.assertEqual(method, 'PUT')
self.assertEqual(path, '/sda1/p/a/c/o')
expected = {
'X-Size': len('test1'),
'X-Etag': md5('test1').hexdigest(),
'X-Content-Type': 'text/plain',
'X-Timestamp': create_timestamp,
}
for key, value in expected.items():
self.assertEqual(headers[key], str(value))
container_updates = [] # reset
# read back object
req = Request.blank('/sda1/p/a/c/o', method='GET')
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['X-Timestamp'],
utils.Timestamp(create_timestamp).normal)
self.assertEqual(resp.headers['X-Backend-Timestamp'],
create_timestamp)
self.assertEqual(resp.body, 'test1')
# send an update with an offset
offset_timestamp = utils.Timestamp(
create_timestamp, offset=1).internal
req = Request.blank('/sda1/p/a/c/o', method='PUT', body='test2',
headers={'X-Timestamp': offset_timestamp,
'X-Container-Host': '10.0.0.1:8080',
'X-Container-Device': 'sda1',
'X-Container-Partition': 'p',
'Content-Type': 'text/html'})
with mocked_http_conn(
200, give_connect=capture_updates) as fake_conn:
resp = req.get_response(self.object_controller)
self.assertRaises(StopIteration, fake_conn.code_iter.next)
self.assertEqual(resp.status_int, 201)
self.assertEquals(1, len(container_updates))
for update in container_updates:
ip, port, method, path, headers = update
self.assertEqual(ip, '10.0.0.1')
self.assertEqual(port, '8080')
self.assertEqual(method, 'PUT')
self.assertEqual(path, '/sda1/p/a/c/o')
expected = {
'X-Size': len('test2'),
'X-Etag': md5('test2').hexdigest(),
'X-Content-Type': 'text/html',
'X-Timestamp': offset_timestamp,
}
for key, value in expected.items():
self.assertEqual(headers[key], str(value))
container_updates = [] # reset
# read back new offset
req = Request.blank('/sda1/p/a/c/o', method='GET')
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['X-Timestamp'],
utils.Timestamp(offset_timestamp).normal)
self.assertEqual(resp.headers['X-Backend-Timestamp'],
offset_timestamp)
self.assertEqual(resp.body, 'test2')
# now overwrite with a newer time
overwrite_timestamp = ts.next()
req = Request.blank('/sda1/p/a/c/o', method='PUT', body='test3',
headers={'X-Timestamp': overwrite_timestamp,
'X-Container-Host': '10.0.0.1:8080',
'X-Container-Device': 'sda1',
'X-Container-Partition': 'p',
'Content-Type': 'text/enriched'})
with mocked_http_conn(
200, give_connect=capture_updates) as fake_conn:
resp = req.get_response(self.object_controller)
self.assertRaises(StopIteration, fake_conn.code_iter.next)
self.assertEqual(resp.status_int, 201)
self.assertEquals(1, len(container_updates))
for update in container_updates:
ip, port, method, path, headers = update
self.assertEqual(ip, '10.0.0.1')
self.assertEqual(port, '8080')
self.assertEqual(method, 'PUT')
self.assertEqual(path, '/sda1/p/a/c/o')
expected = {
'X-Size': len('test3'),
'X-Etag': md5('test3').hexdigest(),
'X-Content-Type': 'text/enriched',
'X-Timestamp': overwrite_timestamp,
}
for key, value in expected.items():
self.assertEqual(headers[key], str(value))
container_updates = [] # reset
# read back overwrite
req = Request.blank('/sda1/p/a/c/o', method='GET')
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['X-Timestamp'],
utils.Timestamp(overwrite_timestamp).normal)
self.assertEqual(resp.headers['X-Backend-Timestamp'],
overwrite_timestamp)
self.assertEqual(resp.body, 'test3')
# delete with an offset
offset_delete = utils.Timestamp(overwrite_timestamp,
offset=1).internal
req = Request.blank('/sda1/p/a/c/o', method='DELETE',
headers={'X-Timestamp': offset_delete,
'X-Container-Host': '10.0.0.1:8080',
'X-Container-Device': 'sda1',
'X-Container-Partition': 'p'})
with mocked_http_conn(
200, give_connect=capture_updates) as fake_conn:
resp = req.get_response(self.object_controller)
self.assertRaises(StopIteration, fake_conn.code_iter.next)
self.assertEqual(resp.status_int, 204)
self.assertEquals(1, len(container_updates))
for update in container_updates:
ip, port, method, path, headers = update
self.assertEqual(ip, '10.0.0.1')
self.assertEqual(port, '8080')
self.assertEqual(method, 'DELETE')
self.assertEqual(path, '/sda1/p/a/c/o')
expected = {
'X-Timestamp': offset_delete,
}
for key, value in expected.items():
self.assertEqual(headers[key], str(value))
container_updates = [] # reset
# read back offset delete
req = Request.blank('/sda1/p/a/c/o', method='GET')
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
self.assertEqual(resp.headers['X-Timestamp'], None)
self.assertEqual(resp.headers['X-Backend-Timestamp'], offset_delete)
# and one more delete with a newer timestamp
delete_timestamp = ts.next()
req = Request.blank('/sda1/p/a/c/o', method='DELETE',
headers={'X-Timestamp': delete_timestamp,
'X-Container-Host': '10.0.0.1:8080',
'X-Container-Device': 'sda1',
'X-Container-Partition': 'p'})
with mocked_http_conn(
200, give_connect=capture_updates) as fake_conn:
resp = req.get_response(self.object_controller)
self.assertRaises(StopIteration, fake_conn.code_iter.next)
self.assertEqual(resp.status_int, 404)
self.assertEquals(1, len(container_updates))
for update in container_updates:
ip, port, method, path, headers = update
self.assertEqual(ip, '10.0.0.1')
self.assertEqual(port, '8080')
self.assertEqual(method, 'DELETE')
self.assertEqual(path, '/sda1/p/a/c/o')
expected = {
'X-Timestamp': delete_timestamp,
}
for key, value in expected.items():
self.assertEqual(headers[key], str(value))
container_updates = [] # reset
# read back delete
req = Request.blank('/sda1/p/a/c/o', method='GET')
resp = req.get_response(self.object_controller)
self.assertEqual(resp.status_int, 404)
self.assertEqual(resp.headers['X-Timestamp'], None)
self.assertEqual(resp.headers['X-Backend-Timestamp'], delete_timestamp)
def test_call_bad_request(self): def test_call_bad_request(self):
# Test swift.obj.server.ObjectController.__call__ # Test swift.obj.server.ObjectController.__call__
inbuf = StringIO() inbuf = StringIO()
@ -2208,7 +2395,7 @@ class TestObjectController(unittest.TestCase):
'x-content-type': 'application/burrito', 'x-content-type': 'application/burrito',
'x-etag': 'd41d8cd98f00b204e9800998ecf8427e', 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
'x-size': '0', 'x-size': '0',
'x-timestamp': '12345', 'x-timestamp': utils.Timestamp('12345').internal,
POLICY_INDEX: '37', POLICY_INDEX: '37',
'referer': 'PUT http://localhost/sda1/p/a/c/o', 'referer': 'PUT http://localhost/sda1/p/a/c/o',
'user-agent': 'obj-server %d' % os.getpid(), 'user-agent': 'obj-server %d' % os.getpid(),
@ -2227,7 +2414,7 @@ class TestObjectController(unittest.TestCase):
'x-content-type': 'text/plain', 'x-content-type': 'text/plain',
'x-etag': 'd41d8cd98f00b204e9800998ecf8427e', 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
'x-size': '0', 'x-size': '0',
'x-timestamp': '12345', 'x-timestamp': utils.Timestamp('12345').internal,
'referer': 'PUT http://localhost/sda1/p/a/c/o', 'referer': 'PUT http://localhost/sda1/p/a/c/o',
'user-agent': 'obj-server %d' % os.getpid(), 'user-agent': 'obj-server %d' % os.getpid(),
POLICY_INDEX: 0, # system account storage policy is 0 POLICY_INDEX: 0, # system account storage policy is 0
@ -2245,7 +2432,7 @@ class TestObjectController(unittest.TestCase):
'x-content-type': 'text/plain', 'x-content-type': 'text/plain',
'x-etag': 'd41d8cd98f00b204e9800998ecf8427e', 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
'x-size': '0', 'x-size': '0',
'x-timestamp': '12345', 'x-timestamp': utils.Timestamp('12345').internal,
'referer': 'PUT http://localhost/sda1/p/a/c/o', 'referer': 'PUT http://localhost/sda1/p/a/c/o',
'user-agent': 'obj-server %d' % os.getpid(), 'user-agent': 'obj-server %d' % os.getpid(),
POLICY_INDEX: 0, # system account storage policy is 0 POLICY_INDEX: 0, # system account storage policy is 0
@ -2314,7 +2501,7 @@ class TestObjectController(unittest.TestCase):
'x-content-type': 'application/burrito', 'x-content-type': 'application/burrito',
'x-etag': 'd41d8cd98f00b204e9800998ecf8427e', 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
'x-size': '0', 'x-size': '0',
'x-timestamp': '12345', 'x-timestamp': utils.Timestamp('12345').internal,
POLICY_INDEX: '26', POLICY_INDEX: '26',
'referer': 'PUT http://localhost/sda1/p/a/c/o', 'referer': 'PUT http://localhost/sda1/p/a/c/o',
'user-agent': 'obj-server %d' % os.getpid(), 'user-agent': 'obj-server %d' % os.getpid(),
@ -2332,7 +2519,7 @@ class TestObjectController(unittest.TestCase):
'x-content-type': 'application/burrito', 'x-content-type': 'application/burrito',
'x-etag': 'd41d8cd98f00b204e9800998ecf8427e', 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
'x-size': '0', 'x-size': '0',
'x-timestamp': '12345', 'x-timestamp': utils.Timestamp('12345').internal,
POLICY_INDEX: '26', POLICY_INDEX: '26',
'referer': 'PUT http://localhost/sda1/p/a/c/o', 'referer': 'PUT http://localhost/sda1/p/a/c/o',
'user-agent': 'obj-server %d' % os.getpid(), 'user-agent': 'obj-server %d' % os.getpid(),
@ -2340,7 +2527,7 @@ class TestObjectController(unittest.TestCase):
def test_object_delete_at_aysnc_update(self): def test_object_delete_at_aysnc_update(self):
policy = random.choice(list(POLICIES)) policy = random.choice(list(POLICIES))
ts = (normalize_timestamp(t) for t in ts = (utils.Timestamp(t) for t in
itertools.count(int(time()))) itertools.count(int(time())))
container_updates = [] container_updates = []
@ -2348,8 +2535,9 @@ class TestObjectController(unittest.TestCase):
def capture_updates(ip, port, method, path, headers, *args, **kwargs): def capture_updates(ip, port, method, path, headers, *args, **kwargs):
container_updates.append((ip, port, method, path, headers)) container_updates.append((ip, port, method, path, headers))
put_timestamp = ts.next() put_timestamp = ts.next().internal
delete_at_timestamp = utils.normalize_delete_at_timestamp(ts.next()) delete_at_timestamp = utils.normalize_delete_at_timestamp(
ts.next().normal)
delete_at_container = ( delete_at_container = (
int(delete_at_timestamp) / int(delete_at_timestamp) /
self.object_controller.expiring_objects_container_divisor * self.object_controller.expiring_objects_container_divisor *
@ -2441,7 +2629,8 @@ class TestObjectController(unittest.TestCase):
self.assertEquals( self.assertEquals(
pickle.load(open(os.path.join( pickle.load(open(os.path.join(
self.testdir, 'sda1', async_dir, 'a83', self.testdir, 'sda1', async_dir, 'a83',
'06fbf0b514e5199dfc4e00f42eb5ea83-0000000001.00000'))), '06fbf0b514e5199dfc4e00f42eb5ea83-%s' %
utils.Timestamp(1).internal))),
{'headers': {'x-timestamp': '1', 'x-out': 'set', {'headers': {'x-timestamp': '1', 'x-out': 'set',
'user-agent': 'obj-server %s' % os.getpid(), 'user-agent': 'obj-server %s' % os.getpid(),
POLICY_INDEX: policy.idx}, POLICY_INDEX: policy.idx},
@ -2480,7 +2669,8 @@ class TestObjectController(unittest.TestCase):
self.assertEquals( self.assertEquals(
pickle.load(open(os.path.join( pickle.load(open(os.path.join(
self.testdir, 'sda1', async_dir, 'a83', self.testdir, 'sda1', async_dir, 'a83',
'06fbf0b514e5199dfc4e00f42eb5ea83-0000000001.00000'))), '06fbf0b514e5199dfc4e00f42eb5ea83-%s' %
utils.Timestamp(1).internal))),
{'headers': {'x-timestamp': '1', 'x-out': str(status), {'headers': {'x-timestamp': '1', 'x-out': str(status),
'user-agent': 'obj-server %s' % os.getpid(), 'user-agent': 'obj-server %s' % os.getpid(),
POLICY_INDEX: policy.idx}, POLICY_INDEX: policy.idx},
@ -2552,7 +2742,8 @@ class TestObjectController(unittest.TestCase):
self.assertTrue( self.assertTrue(
os.path.exists(os.path.join( os.path.exists(os.path.join(
self.testdir, 'sda1', async_dir, 'a83', self.testdir, 'sda1', async_dir, 'a83',
'06fbf0b514e5199dfc4e00f42eb5ea83-0000000001.00000'))) '06fbf0b514e5199dfc4e00f42eb5ea83-%s' %
utils.Timestamp(1).internal)))
finally: finally:
object_server.http_connect = orig_http_connect object_server.http_connect = orig_http_connect
utils.HASH_PATH_PREFIX = _prefix utils.HASH_PATH_PREFIX = _prefix
@ -2607,7 +2798,7 @@ class TestObjectController(unittest.TestCase):
'x-size': '0', 'x-size': '0',
'x-etag': 'd41d8cd98f00b204e9800998ecf8427e', 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
'x-content-type': 'text/plain', 'x-content-type': 'text/plain',
'x-timestamp': '1', 'x-timestamp': utils.Timestamp(1).internal,
POLICY_INDEX: '0', # default when not given POLICY_INDEX: '0', # default when not given
'x-trans-id': '123', 'x-trans-id': '123',
'referer': 'PUT http://localhost/sda1/0/a/c/o'})) 'referer': 'PUT http://localhost/sda1/0/a/c/o'}))
@ -2639,14 +2830,14 @@ class TestObjectController(unittest.TestCase):
self.assertEqual(account, 'a') self.assertEqual(account, 'a')
self.assertEqual(container, 'c') self.assertEqual(container, 'c')
self.assertEqual(obj, 'o') self.assertEqual(obj, 'o')
self.assertEqual(timestamp, '1') self.assertEqual(timestamp, utils.Timestamp(1).internal)
self.assertEqual(policy_index, 0) self.assertEqual(policy_index, 0)
self.assertEqual(data, { self.assertEqual(data, {
'headers': HeaderKeyDict({ 'headers': HeaderKeyDict({
'X-Size': '0', 'X-Size': '0',
'User-Agent': 'obj-server %s' % os.getpid(), 'User-Agent': 'obj-server %s' % os.getpid(),
'X-Content-Type': 'text/plain', 'X-Content-Type': 'text/plain',
'X-Timestamp': '1', 'X-Timestamp': utils.Timestamp(1).internal,
'X-Trans-Id': '123', 'X-Trans-Id': '123',
'Referer': 'PUT http://localhost/sda1/0/a/c/o', 'Referer': 'PUT http://localhost/sda1/0/a/c/o',
'X-Backend-Storage-Policy-Index': '0', 'X-Backend-Storage-Policy-Index': '0',
@ -2710,7 +2901,7 @@ class TestObjectController(unittest.TestCase):
'0000000002-a/c/o', None, None, None, '0000000002-a/c/o', None, None, None,
HeaderKeyDict({ HeaderKeyDict({
POLICY_INDEX: 0, POLICY_INDEX: 0,
'x-timestamp': '1', 'x-timestamp': utils.Timestamp('1').internal,
'x-trans-id': '123', 'x-trans-id': '123',
'referer': 'PUT http://localhost/v1/a/c/o'}), 'referer': 'PUT http://localhost/v1/a/c/o'}),
'sda1', 0]) 'sda1', 0])
@ -2737,7 +2928,7 @@ class TestObjectController(unittest.TestCase):
None, None, None, None, None, None,
HeaderKeyDict({ HeaderKeyDict({
POLICY_INDEX: 0, POLICY_INDEX: 0,
'x-timestamp': '1', 'x-timestamp': utils.Timestamp('1').internal,
'x-trans-id': '1234', 'x-trans-id': '1234',
'referer': 'PUT http://localhost/v1/a/c/o'}), 'referer': 'PUT http://localhost/v1/a/c/o'}),
'sda1', 0]) 'sda1', 0])
@ -2764,7 +2955,7 @@ class TestObjectController(unittest.TestCase):
None, None, None, None, None, None,
HeaderKeyDict({ HeaderKeyDict({
POLICY_INDEX: 0, POLICY_INDEX: 0,
'x-timestamp': '1', 'x-timestamp': utils.Timestamp('1').internal,
'x-trans-id': '1234', 'x-trans-id': '1234',
'referer': 'PUT http://localhost/v1/a/c/o'}), 'referer': 'PUT http://localhost/v1/a/c/o'}),
'sda1', 0]) 'sda1', 0])
@ -2799,7 +2990,7 @@ class TestObjectController(unittest.TestCase):
'x-size': '0', 'x-size': '0',
'x-etag': 'd41d8cd98f00b204e9800998ecf8427e', 'x-etag': 'd41d8cd98f00b204e9800998ecf8427e',
'x-content-type': 'text/plain', 'x-content-type': 'text/plain',
'x-timestamp': '1', 'x-timestamp': utils.Timestamp('1').internal,
'x-trans-id': '1234', 'x-trans-id': '1234',
'referer': 'PUT http://localhost/v1/a/c/o'}), 'referer': 'PUT http://localhost/v1/a/c/o'}),
'sda1', 0]) 'sda1', 0])
@ -2850,7 +3041,8 @@ class TestObjectController(unittest.TestCase):
'0000000002-a/c/o', None, None, '0000000002-a/c/o', None, None,
None, HeaderKeyDict({ None, HeaderKeyDict({
POLICY_INDEX: 0, POLICY_INDEX: 0,
'x-timestamp': '1', 'x-trans-id': '1234', 'x-timestamp': utils.Timestamp('1').internal,
'x-trans-id': '1234',
'referer': 'DELETE http://localhost/v1/a/c/o'}), 'referer': 'DELETE http://localhost/v1/a/c/o'}),
'sda1', 0]) 'sda1', 0])
@ -3061,7 +3253,7 @@ class TestObjectController(unittest.TestCase):
resp = req.get_response(self.object_controller) resp = req.get_response(self.object_controller)
self.assertEquals(resp.status_int, 404) self.assertEquals(resp.status_int, 404)
self.assertEquals(resp.headers['X-Backend-Timestamp'], self.assertEquals(resp.headers['X-Backend-Timestamp'],
put_timestamp) utils.Timestamp(put_timestamp))
finally: finally:
object_server.time.time = orig_time object_server.time.time = orig_time
@ -3131,7 +3323,7 @@ class TestObjectController(unittest.TestCase):
resp = req.get_response(self.object_controller) resp = req.get_response(self.object_controller)
self.assertEquals(resp.status_int, 404) self.assertEquals(resp.status_int, 404)
self.assertEquals(resp.headers['X-Backend-Timestamp'], self.assertEquals(resp.headers['X-Backend-Timestamp'],
put_timestamp) utils.Timestamp(put_timestamp))
finally: finally:
object_server.time.time = orig_time object_server.time.time = orig_time
@ -3251,7 +3443,7 @@ class TestObjectController(unittest.TestCase):
self.testdir, 'sda1', self.testdir, 'sda1',
storage_directory(diskfile.get_data_dir(0), 'p', storage_directory(diskfile.get_data_dir(0), 'p',
hash_path('a', 'c', 'o')), hash_path('a', 'c', 'o')),
test_timestamp + '.data') utils.Timestamp(test_timestamp).internal + '.data')
self.assert_(os.path.isfile(objfile)) self.assert_(os.path.isfile(objfile))
# move time past expirery # move time past expirery