Merge remote-tracking branch 'origin/master' into ttl

Jon Haddad
2013-10-24 17:36:14 -07:00
6 changed files with 102 additions and 16 deletions

View File

@@ -256,6 +256,10 @@ class Integer(Column):
         return self.validate(value)
 
+
+class BigInt(Integer):
+    db_type = 'bigint'
+
 
 class VarInt(Column):
     db_type = 'varint'
@@ -332,11 +336,9 @@ class DateTime(Column):
         else:
             raise ValidationError("'{}' is not a datetime object".format(value))
         epoch = datetime(1970, 1, 1, tzinfo=value.tzinfo)
-        offset = 0
-        if epoch.tzinfo:
-            offset_delta = epoch.tzinfo.utcoffset(epoch)
-            offset = offset_delta.days*24*3600 + offset_delta.seconds
+        offset = epoch.tzinfo.utcoffset(epoch).total_seconds() if epoch.tzinfo else 0
 
         return long(((value - epoch).total_seconds() - offset) * 1000)
 
 class Date(Column):
@@ -406,12 +408,7 @@ class TimeUUID(UUID):
         global _last_timestamp
         epoch = datetime(1970, 1, 1, tzinfo=dt.tzinfo)
-        offset = 0
-        if epoch.tzinfo:
-            offset_delta = epoch.tzinfo.utcoffset(epoch)
-            offset = offset_delta.days*24*3600 + offset_delta.seconds
+        offset = epoch.tzinfo.utcoffset(epoch).total_seconds() if epoch.tzinfo else 0
 
         timestamp = (dt - epoch).total_seconds() - offset
 
         node = None
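
Both hunks above make the same simplification in DateTime.to_database and TimeUUID.from_datetime: the four-line offset_delta.days*24*3600 + offset_delta.seconds computation collapses into a single utcoffset(...).total_seconds() expression, and the epoch is anchored in the value's own tzinfo so aware and naive datetimes both work. A standalone sketch of that arithmetic (plain Python outside cqlengine; the helper name is illustrative, and int() stands in for the library's Python 2 long()):

from datetime import datetime, timedelta, timezone

def datetime_to_epoch_ms(value):
    # Anchor the epoch in the value's own tzinfo so aware and naive
    # datetimes can both be subtracted from it.
    epoch = datetime(1970, 1, 1, tzinfo=value.tzinfo)
    # An epoch built at UTC+N denotes an instant N hours before the real
    # epoch, so the subtraction over-counts by that offset; remove it here.
    offset = epoch.tzinfo.utcoffset(epoch).total_seconds() if epoch.tzinfo else 0
    return int(((value - epoch).total_seconds() - offset) * 1000)

# The same instant expressed at UTC and at UTC+3 maps to the same value.
utc = datetime(2013, 10, 24, 12, 0, tzinfo=timezone.utc)
helsinki = utc.astimezone(timezone(timedelta(hours=3)))
assert datetime_to_epoch_ms(utc) == datetime_to_epoch_ms(helsinki)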

View File

@@ -54,8 +54,10 @@ class MinTimeUUID(BaseQueryFunction):
         super(MinTimeUUID, self).__init__(value)
 
     def get_value(self):
-        epoch = datetime(1970, 1, 1)
-        return long((self.value - epoch).total_seconds() * 1000)
+        epoch = datetime(1970, 1, 1, tzinfo=self.value.tzinfo)
+        offset = epoch.tzinfo.utcoffset(epoch).total_seconds() if epoch.tzinfo else 0
+        return long(((self.value - epoch).total_seconds() - offset) * 1000)
 
     def get_dict(self, column):
         return {self.identifier: self.get_value()}
@@ -79,8 +81,10 @@ class MaxTimeUUID(BaseQueryFunction):
         super(MaxTimeUUID, self).__init__(value)
 
     def get_value(self):
-        epoch = datetime(1970, 1, 1)
-        return long((self.value - epoch).total_seconds() * 1000)
+        epoch = datetime(1970, 1, 1, tzinfo=self.value.tzinfo)
+        offset = epoch.tzinfo.utcoffset(epoch).total_seconds() if epoch.tzinfo else 0
+        return long(((self.value - epoch).total_seconds() - offset) * 1000)
 
     def get_dict(self, column):
         return {self.identifier: self.get_value()}
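
This change is not just a cleanup: with the old naive datetime(1970, 1, 1) epoch, passing a timezone-aware datetime to MinTimeUUID or MaxTimeUUID raised a TypeError, because Python refuses to subtract a naive datetime from an aware one. A small sketch of the failure and the corrected arithmetic (plain Python, no cqlengine imports):

from datetime import datetime, timedelta, timezone

aware = datetime(2013, 10, 24, 12, 0, tzinfo=timezone(timedelta(hours=3)))

# Old behaviour: a naive epoch cannot be subtracted from an aware value.
try:
    aware - datetime(1970, 1, 1)
except TypeError as exc:
    print(exc)  # can't subtract offset-naive and offset-aware datetimes

# New behaviour: build the epoch with the value's own tzinfo, then strip
# that tzinfo's UTC offset so the result is measured from the real epoch.
epoch = datetime(1970, 1, 1, tzinfo=aware.tzinfo)
offset = epoch.tzinfo.utcoffset(epoch).total_seconds()
print(int(((aware - epoch).total_seconds() - offset) * 1000))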

View File

@@ -15,6 +15,7 @@ from cqlengine.columns import Bytes
 from cqlengine.columns import Ascii
 from cqlengine.columns import Text
 from cqlengine.columns import Integer
+from cqlengine.columns import BigInt
 from cqlengine.columns import VarInt
 from cqlengine.columns import DateTime
 from cqlengine.columns import Date
@@ -180,6 +181,16 @@ class TestInteger(BaseCassEngTestCase):
         it = self.IntegerTest()
         it.validate()
 
+
+class TestBigInt(BaseCassEngTestCase):
+    class BigIntTest(Model):
+        test_id = UUID(primary_key=True, default=lambda:uuid4())
+        value = BigInt(default=0, required=True)
+
+    def test_default_zero_fields_validate(self):
+        """ Tests that bigint columns with a default value of 0 validate """
+        it = self.BigIntTest()
+        it.validate()
+
 
 class TestText(BaseCassEngTestCase):
 
     def test_min_length(self):

View File

@@ -95,6 +95,12 @@ class TestInteger(BaseColumnIOTest):
     pkey_val = 5
     data_val = 6
 
+
+class TestBigInt(BaseColumnIOTest):
+
+    column = columns.BigInt
+
+    pkey_val = 6
+    data_val = pow(2, 63) - 1
+
 
 class TestDateTime(BaseColumnIOTest):
 
     column = columns.DateTime
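
The round-trip value pow(2, 63) - 1 is the top of the signed 64-bit range that backs the CQL bigint type. For reference (the helper below is hypothetical and not part of the test suite):

# Signed 64-bit bounds for a CQL bigint.
BIGINT_MIN = -pow(2, 63)      # -9223372036854775808
BIGINT_MAX = pow(2, 63) - 1   #  9223372036854775807

def fits_bigint(value):
    # Hypothetical helper: would this integer survive a bigint round trip?
    return BIGINT_MIN <= value <= BIGINT_MAX

assert fits_bigint(pow(2, 63) - 1)
assert not fits_bigint(pow(2, 63))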

View File

@@ -11,6 +11,25 @@ from cqlengine.management import delete_table
 from cqlengine.models import Model
 from cqlengine import columns
 from cqlengine import query
+from datetime import timedelta
+from datetime import tzinfo
+
+
+class TzOffset(tzinfo):
+    """Minimal implementation of a timezone offset to help testing with timezone
+    aware datetimes.
+    """
+
+    def __init__(self, offset):
+        self._offset = timedelta(hours=offset)
+
+    def utcoffset(self, dt):
+        return self._offset
+
+    def tzname(self, dt):
+        return 'TzOffset: {}'.format(self._offset.hours)
+
+    def dst(self, dt):
+        return timedelta(0)
+
 
 class TestModel(Model):
 
     test_id = columns.Integer(primary_key=True)
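
TzOffset is a fixed-offset tzinfo used by the new test below: it reports the same offset for every datetime and no DST. A quick illustration of how the test builds its aware datetimes with it (assumes the TzOffset class from the hunk above is in scope):

from datetime import datetime

utc_now = datetime.utcnow().replace(tzinfo=TzOffset(0))   # aware, UTC
helsinki_now = utc_now.astimezone(TzOffset(3))            # same instant at UTC+3

# Different wall-clock fields, same absolute point in time.
assert utc_now == helsinki_now
# Caveat: timedelta has no .hours attribute, so TzOffset.tzname() as written
# would raise AttributeError if it were ever called.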
@@ -515,6 +534,49 @@ class TestMinMaxTimeUUIDFunctions(BaseCassEngTestCase):
         super(TestMinMaxTimeUUIDFunctions, cls).tearDownClass()
         delete_table(TimeUUIDQueryModel)
 
+    def test_tzaware_datetime_support(self):
+        """Test that using timezone aware datetime instances works with the
+        MinTimeUUID/MaxTimeUUID functions.
+        """
+        pk = uuid4()
+
+        midpoint_utc = datetime.utcnow().replace(tzinfo=TzOffset(0))
+        midpoint_helsinki = midpoint_utc.astimezone(TzOffset(3))
+
+        # Assert pre-condition that we have the same logical point in time
+        assert midpoint_utc.utctimetuple() == midpoint_helsinki.utctimetuple()
+        assert midpoint_utc.timetuple() != midpoint_helsinki.timetuple()
+
+        TimeUUIDQueryModel.create(
+            partition=pk,
+            time=columns.TimeUUID.from_datetime(midpoint_utc - timedelta(minutes=1)),
+            data='1')
+
+        TimeUUIDQueryModel.create(
+            partition=pk,
+            time=columns.TimeUUID.from_datetime(midpoint_utc),
+            data='2')
+
+        TimeUUIDQueryModel.create(
+            partition=pk,
+            time=columns.TimeUUID.from_datetime(midpoint_utc + timedelta(minutes=1)),
+            data='3')
+
+        assert ['1', '2'] == [o.data for o in TimeUUIDQueryModel.filter(
+            TimeUUIDQueryModel.partition == pk,
+            TimeUUIDQueryModel.time <= functions.MaxTimeUUID(midpoint_utc))]
+
+        assert ['1', '2'] == [o.data for o in TimeUUIDQueryModel.filter(
+            TimeUUIDQueryModel.partition == pk,
+            TimeUUIDQueryModel.time <= functions.MaxTimeUUID(midpoint_helsinki))]
+
+        assert ['2', '3'] == [o.data for o in TimeUUIDQueryModel.filter(
+            TimeUUIDQueryModel.partition == pk,
+            TimeUUIDQueryModel.time >= functions.MinTimeUUID(midpoint_utc))]
+
+        assert ['2', '3'] == [o.data for o in TimeUUIDQueryModel.filter(
+            TimeUUIDQueryModel.partition == pk,
+            TimeUUIDQueryModel.time >= functions.MinTimeUUID(midpoint_helsinki))]
+
     def test_success_case(self):
         """ Test that the min and max time uuid functions work as expected """
         pk = uuid4()
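
The two precondition asserts in this test hinge on a detail worth spelling out: utctimetuple() normalizes an aware datetime to UTC, so both representations agree, while timetuple() reports local wall-clock fields and therefore differs by the three-hour offset. A tiny standalone illustration (plain Python datetime, independent of the test):

from datetime import datetime, timedelta, timezone

utc = datetime(2013, 10, 24, 12, 0, tzinfo=timezone.utc)
helsinki = utc.astimezone(timezone(timedelta(hours=3)))

print(utc.utctimetuple().tm_hour, helsinki.utctimetuple().tm_hour)  # 12 12
print(utc.timetuple().tm_hour, helsinki.timetuple().tm_hour)        # 12 15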

View File

@@ -38,10 +38,16 @@ Columns
 .. class:: Integer()
 
-    Stores an integer value ::
+    Stores a 32-bit signed integer value ::
 
         columns.Integer()
 
+.. class:: BigInt()
+
+    Stores a 64-bit signed long value ::
+
+        columns.BigInt()
+
 .. class:: VarInt()
 
     Stores an arbitrary-precision integer ::
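
Taken together, the three documented integer columns map onto CQL int, bigint, and varint. A hypothetical model showing them side by side (model and field names are illustrative only):

from cqlengine import columns
from cqlengine.models import Model

class CounterSketch(Model):
    # 32-bit signed integer (CQL int)
    row_id = columns.Integer(primary_key=True)
    # 64-bit signed long (CQL bigint) -- the column type added in this merge
    hits = columns.BigInt(default=0)
    # arbitrary-precision integer (CQL varint)
    lifetime_total = columns.VarInt()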