made x_container_sync_row its own column

This commit is contained in:
gholt 2011-02-24 11:37:55 -08:00
parent adb45bc871
commit e9b7815e23
6 changed files with 58 additions and 36 deletions

View File

@ -665,7 +665,8 @@ class ContainerBroker(DatabaseBroker):
id TEXT,
status TEXT DEFAULT '',
status_changed_at TEXT DEFAULT '0',
metadata TEXT DEFAULT ''
metadata TEXT DEFAULT '',
x_container_sync_row INTEGER DEFAULT -1
);
INSERT INTO container_stat (object_count, bytes_used)
@ -873,10 +874,11 @@ class ContainerBroker(DatabaseBroker):
"""
Get global data for the container.
:returns: a tuple of (account, container, created_at, put_timestamp,
delete_timestamp, object_count, bytes_used,
reported_put_timestamp, reported_delete_timestamp,
reported_object_count, reported_bytes_used, hash, id)
:returns: a dict with at least the following keys: account, container,
created_at, put_timestamp, delete_timestamp, object_count,
bytes_used, reported_put_timestamp,
reported_delete_timestamp, reported_object_count,
reported_bytes_used, hash, id, and x_container_sync_row
"""
try:
self._commit_puts()
@ -884,13 +886,46 @@ class ContainerBroker(DatabaseBroker):
if not self.stale_reads_ok:
raise
with self.get() as conn:
return conn.execute('''
SELECT account, container, created_at, put_timestamp,
delete_timestamp, object_count, bytes_used,
reported_put_timestamp, reported_delete_timestamp,
reported_object_count, reported_bytes_used, hash, id
FROM container_stat
''').fetchone()
try:
return conn.execute('''
SELECT account, container, created_at, put_timestamp,
delete_timestamp, object_count, bytes_used,
reported_put_timestamp, reported_delete_timestamp,
reported_object_count, reported_bytes_used, hash, id,
x_container_sync_row
FROM container_stat
''').fetchone()
except sqlite3.OperationalError, err:
if 'no such column: x_container_sync_row' not in str(err):
raise
return conn.execute('''
SELECT account, container, created_at, put_timestamp,
delete_timestamp, object_count, bytes_used,
reported_put_timestamp, reported_delete_timestamp,
reported_object_count, reported_bytes_used, hash, id,
-1 AS x_container_sync_row
FROM container_stat
''').fetchone()
def set_x_container_sync_row(self, value):
"""
Persist the given value into the container_stat table's
x_container_sync_row column, which tracks the last object-table
ROWID this database has synced via container sync.

Older databases were created before this column existed, so if the
UPDATE fails with "no such column" the column is added in place
(INTEGER DEFAULT -1, matching the current schema) and the UPDATE is
retried.

:param value: the new x_container_sync_row value (the ROWID of the
              last synced object row — see the container sync daemon)
"""
with self.get() as conn:
try:
conn.execute('''
UPDATE container_stat
SET x_container_sync_row = ?
''', (value,))
except sqlite3.OperationalError, err:
# Only the missing-column case is recoverable; anything else
# is a genuine error and must propagate.
if 'no such column: x_container_sync_row' not in str(err):
raise
# Upgrade the pre-existing schema in place, then retry.
conn.execute('''
ALTER TABLE container_stat
ADD COLUMN x_container_sync_row INTEGER DEFAULT -1
''')
conn.execute('''
UPDATE container_stat
SET x_container_sync_row = ?
''', (value,))
conn.commit()
def reported(self, put_timestamp, delete_timestamp, object_count,
bytes_used):
@ -1397,9 +1432,9 @@ class AccountBroker(DatabaseBroker):
"""
Get global data for the account.
:returns: a tuple of (account, created_at, put_timestamp,
delete_timestamp, container_count, object_count,
bytes_used, hash, id)
:returns: a dict with at least the following keys: account, created_at,
put_timestamp, delete_timestamp, container_count,
object_count, bytes_used, hash, id
"""
try:
self._commit_puts()

View File

@ -187,7 +187,7 @@ class DevAuth(object):
# account DELETE or PUT...
req.environ['swift_owner'] = True
return None
if ('swift_sync_key' in req.environ and
if (req.environ.get('swift_sync_key') and
req.environ['swift_sync_key'] ==
req.headers.get('x-container-sync-key', None) and
(req.remote_addr in self.allowed_sync_hosts or

View File

@ -280,7 +280,7 @@ class Swauth(object):
# account DELETE or PUT...
req.environ['swift_owner'] = True
return None
if ('swift_sync_key' in req.environ and
if (req.environ.get('swift_sync_key') and
req.environ['swift_sync_key'] ==
req.headers.get('x-container-sync-key', None) and
(req.remote_addr in self.allowed_sync_hosts or

View File

@ -137,19 +137,12 @@ class ContainerSync(Daemon):
if not broker.is_deleted():
sync_to = None
sync_key = None
sync_row = -1
sync_row = info['x_container_sync_row']
for key, (value, timestamp) in broker.metadata.iteritems():
if key.lower() == 'x-container-sync-to':
sync_to = value
elif key.lower() == 'x-container-sync-key':
sync_key = value
# TODO: Make this a separate column, not a metadata item.
# Each db should track what it has synced separately and
these metadata get overwritten by newer values from other
# dbs. Also, once a new column, it'll need special
# attention when doing a fresh db copy.
elif key.lower() == 'x-container-sync-row':
sync_row = int(value)
if not sync_to or not sync_key:
self.container_skips += 1
return
@ -171,8 +164,7 @@ class ContainerSync(Daemon):
broker, info):
return
sync_row = rows[0]['ROWID']
broker.update_metadata({'X-Container-Sync-Row':
(str(sync_row), normalize_timestamp(time.time()))})
broker.set_x_container_sync_row(sync_row)
self.container_syncs += 1
except Exception:
self.container_failures += 1

View File

@ -87,15 +87,10 @@ class AccountStat(Daemon):
db_path = os.path.join(root, filename)
broker = AccountBroker(db_path)
if not broker.is_deleted():
(account_name,
_junk, _junk, _junk,
container_count,
object_count,
bytes_used,
_junk, _junk) = broker.get_info()
info = broker.get_info()
line_data = '"%s",%d,%d,%d\n' % (
account_name, container_count,
object_count, bytes_used)
info['account'], info['container_count'],
info['object_count'], info['bytes_used'])
statfile.write(line_data)
hasher.update(line_data)
file_hash = hasher.hexdigest()

View File

@ -215,7 +215,7 @@ class TestObject(unittest.TestCase):
conn.request('PUT', '%s/%s/%s' % (parsed.path,
shared_container,
'private_object'),
'', {'X-Auth-Token': token,
'', {'User-Agent': 'GLHUA', 'X-Auth-Token': token,
'Content-Length': '0',
'X-Copy-From': '%s/%s' % (self.container,
self.obj)})