Make cloudkitty timezone-aware
This makes the naive datetime objects used in the codebase timezone-aware.

Change-Id: I89ff3af5de275dcfeef7a0411ec0fb9d4df58ab3
Story: 2005319
Task: 30238

parent 0c1546d4aa
commit e552c3851f
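The snippet below is an illustrative sketch, not part of the commit: it shows how the tzutils helpers introduced further down in this diff are meant to replace naive datetime handling, using only functions defined in cloudkitty/tzutils.py in this change.

import datetime

from cloudkitty import tzutils

# A timezone-aware "now", instead of the naive utcnow() used before.
now = tzutils.localized_now()
assert now.tzinfo is not None

# Convert to UTC, optionally dropping tzinfo for storage backends that
# expect naive UTC timestamps (what the v1 storage adapter does below).
naive_utc = tzutils.local_to_utc(now, naive=True)
assert naive_utc.tzinfo is None

# And back to a localized, aware object when returning data through the API.
localized = tzutils.utc_to_local(naive_utc)

# Periods are added through add_delta() so that a DST switch cannot make two
# consecutive timestamps represent the same instant.
next_period = tzutils.add_delta(now, datetime.timedelta(seconds=3600))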
@@ -25,6 +25,7 @@ import wsmeext.pecan as wsme_pecan
 
 from cloudkitty.api.v1.datamodels import report as report_models
 from cloudkitty.common import policy
+from cloudkitty import tzutils
 from cloudkitty import utils as ck_utils
 
 LOG = logging.getLogger(__name__)
@@ -154,8 +155,8 @@ class ReportController(rest.RestController):
             kwargs = {
                 'res_type': res.get('type') or res.get('res_type'),
                 'tenant_id': res.get(scope_key) or res.get('tenant_id'),
-                'begin': res['begin'],
-                'end': res['end'],
+                'begin': tzutils.local_to_utc(res['begin'], naive=True),
+                'end': tzutils.local_to_utc(res['end'], naive=True),
                 'rate': res['rate'],
             }
             summarymodel = report_models.SummaryModel(**kwargs)
@@ -25,6 +25,7 @@ import wsmeext.pecan as wsme_pecan
 from cloudkitty.api.v1.datamodels import storage as storage_models
 from cloudkitty.common import policy
 from cloudkitty import storage
+from cloudkitty import tzutils
 
 
 CONF = cfg.CONF
@@ -90,8 +91,10 @@ class DataFramesController(rest.RestController):
                     frame_tenant = desc[scope_key]
                 resources.append(resource)
             dataframe = storage_models.DataFrame(
-                begin=frame['period']['begin'],
-                end=frame['period']['end'],
+                begin=tzutils.local_to_utc(
+                    frame['period']['begin'], naive=True),
+                end=tzutils.local_to_utc(
+                    frame['period']['end'], naive=True),
                 tenant_id=frame_tenant,
                 resources=resources)
             dataframes.append(dataframe)
@@ -21,7 +21,7 @@ from cloudkitty.api.v2 import utils as api_utils
 from cloudkitty.common import policy
 from cloudkitty import messaging
 from cloudkitty import storage_state
-from cloudkitty import utils as ck_utils
+from cloudkitty import tzutils
 
 
 class ScopeState(base.BaseResource):
@@ -94,7 +94,7 @@ class ScopeState(base.BaseResource):
         voluptuous.Optional('collector', default=[]):
             api_utils.MultiQueryParam(str),
         voluptuous.Required('state'):
-            voluptuous.Coerce(ck_utils.iso2dt),
+            voluptuous.Coerce(tzutils.dt_from_iso),
     })
     def put(self,
             all_scopes=False,
@@ -133,7 +133,7 @@ class ScopeState(base.BaseResource):
         } for r in results]
 
         self._client.cast({}, 'reset_state', res_data={
-            'scopes': serialized_results, 'state': ck_utils.dt2iso(state),
+            'scopes': serialized_results, 'state': state.isoformat(),
         })
 
         return {}, 202
@@ -18,7 +18,7 @@ import voluptuous
 from cloudkitty.api.v2 import base
 from cloudkitty.api.v2 import utils as api_utils
 from cloudkitty.common import policy
-from cloudkitty import utils
+from cloudkitty import tzutils
 
 
 class Summary(base.BaseResource):
@@ -29,8 +29,8 @@ class Summary(base.BaseResource):
         voluptuous.Optional('groupby'): api_utils.MultiQueryParam(str),
         voluptuous.Optional('filters'):
             api_utils.SingleDictQueryParam(str, str),
-        voluptuous.Optional('begin'): voluptuous.Coerce(utils.iso2dt),
-        voluptuous.Optional('end'): voluptuous.Coerce(utils.iso2dt),
+        voluptuous.Optional('begin'): voluptuous.Coerce(tzutils.dt_from_iso),
+        voluptuous.Optional('end'): voluptuous.Coerce(tzutils.dt_from_iso),
     })
     def get(self, groupby=None, filters={},
             begin=None, end=None,
@@ -39,8 +39,8 @@ class Summary(base.BaseResource):
             flask.request.context,
             'summary:get_summary',
             {'tenant_id': flask.request.context.project_id})
-        begin = begin or utils.get_month_start()
-        end = end or utils.get_next_month()
+        begin = begin or tzutils.get_month_start()
+        end = end or tzutils.get_next_month()
 
         if not flask.request.context.is_admin:
             filters['project_id'] = flask.request.context.project_id
@@ -17,8 +17,6 @@ import decimal
 import functools
 import json
 
-from cloudkitty import utils as ck_utils
-
 
 class CloudkittyJSONEncoder(json.JSONEncoder):
     """Cloudkitty custom json encoder."""
@@ -27,7 +25,7 @@ class CloudkittyJSONEncoder(json.JSONEncoder):
         if isinstance(obj, decimal.Decimal):
            return float(obj)
         elif isinstance(obj, datetime.datetime):
-            return ck_utils.dt2iso(obj)
+            return obj.isoformat()
         return super(CloudkittyJSONEncoder, self).default(obj)
 
 
@@ -39,6 +39,7 @@ from cloudkitty import messaging
 from cloudkitty import storage
 from cloudkitty import storage_state as state
 from cloudkitty import transformer
+from cloudkitty import tzutils
 from cloudkitty import utils as ck_utils
 
 
@@ -165,7 +166,7 @@ class ScopeEndpoint(object):
                 lock_name,
             )
         )
-        state_dt = ck_utils.iso2dt(res_data['state'])
+        state_dt = tzutils.dt_from_iso(res_data['state'])
         try:
             self._storage.delete(begin=state_dt, end=None, filters={
                 scope['scope_key']: scope['scope_id'],
@@ -247,7 +248,8 @@ class Worker(BaseWorker):
         super(Worker, self).__init__(self._tenant_id)
 
     def _collect(self, metric, start_timestamp):
-        next_timestamp = start_timestamp + timedelta(seconds=self._period)
+        next_timestamp = tzutils.add_delta(
+            start_timestamp, timedelta(seconds=self._period))
 
         raw_data = self._collector.retrieve(
             metric,
@@ -13,12 +13,14 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 #
+import functools
 
 from oslo_config import cfg
 from oslo_log import log as logging
 from stevedore import driver
 
 from cloudkitty.storage import v2 as storage_v2
+from cloudkitty import tzutils
 
 LOG = logging.getLogger(__name__)
 
@@ -63,12 +65,28 @@ class V1StorageAdapter(storage_v2.BaseStorage):
     def __init__(self, storage_args, storage_namespace, backend=None):
         self.storage = _get_storage_instance(
             storage_args, storage_namespace, backend=backend)
+        self._localize_dataframes = functools.partial(
+            self.__update_frames_timestamps, tzutils.utc_to_local)
+        self._make_dataframes_naive = functools.partial(
+            self.__update_frames_timestamps, tzutils.local_to_utc, naive=True)
 
     def init(self):
         return self.storage.init()
 
-    def push(self, dataframes, scope_id):
+    @staticmethod
+    def __update_frames_timestamps(func, frames, **kwargs):
+        for frame in frames:
+            period = frame['period'] if 'period' in frame.keys() else frame
+            begin = period['begin']
+            end = period['end']
+            if begin:
+                period['begin'] = func(begin, **kwargs)
+            if end:
+                period['end'] = func(end, **kwargs)
+
+    def push(self, dataframes, scope_id=None):
         if dataframes:
+            self._make_dataframes_naive(dataframes)
             self.storage.append(dataframes, scope_id)
             self.storage.commit(scope_id)
 
@@ -85,10 +103,11 @@ class V1StorageAdapter(storage_v2.BaseStorage):
         tenant_id = filters.get('project_id') if filters else None
         metric_types = self._check_metric_types(metric_types)
         frames = self.storage.get_time_frame(
-            begin, end,
+            tzutils.local_to_utc(begin, naive=True) if begin else None,
+            tzutils.local_to_utc(end, naive=True) if end else None,
             res_type=metric_types,
             tenant_id=tenant_id)
+        self._localize_dataframes(frames)
         return {
             'total': len(frames),
             'dataframes': frames,
@@ -111,7 +130,8 @@ class V1StorageAdapter(storage_v2.BaseStorage):
         storage_gby = ','.join(storage_gby) if storage_gby else None
         metric_types = self._check_metric_types(metric_types)
         total = self.storage.get_total(
-            begin, end,
+            tzutils.local_to_utc(begin, naive=True),
+            tzutils.local_to_utc(end, naive=True),
             tenant_id=tenant_id,
             service=metric_types,
             groupby=storage_gby)
@@ -125,6 +145,8 @@ class V1StorageAdapter(storage_v2.BaseStorage):
                 t['type'] = t.get('res_type')
             else:
                 t['type'] = None
+
+        self._localize_dataframes(total)
         return {
             'total': len(total),
             'results': total,
@@ -22,6 +22,7 @@ from oslo_log import log
 import six
 
 from cloudkitty.storage import v2 as v2_storage
+from cloudkitty import tzutils
 from cloudkitty import utils
 
 
@@ -149,7 +150,7 @@ class InfluxClient(object):
     @staticmethod
     def _get_time_query(begin, end):
         return " WHERE time >= '{}' AND time < '{}'".format(
-            utils.isotime(begin), utils.isotime(end))
+            begin.isoformat(), end.isoformat())
 
     def _get_filter_query(self, filters):
         if not filters:
@@ -176,7 +177,6 @@ class InfluxClient(object):
             query += ' GROUP BY ' + groupby_query
-
         query += ';'
 
         return self._conn.query(query)
 
     def retrieve(self,
@@ -263,10 +263,10 @@ class InfluxStorage(v2_storage.BaseStorage):
     @staticmethod
     def _check_begin_end(begin, end):
         if not begin:
-            begin = utils.get_month_start()
+            begin = tzutils.get_month_start()
         if not end:
-            end = utils.get_next_month()
-        return begin, end
+            end = tzutils.get_next_month()
+        return tzutils.local_to_utc(begin), tzutils.local_to_utc(end)
 
     @staticmethod
     def _point_to_dataframe_entry(point):
@@ -290,17 +290,17 @@ class InfluxStorage(v2_storage.BaseStorage):
         dataframes = {}
         for point in points:
             point_type = point['type']
-            if point['time'] not in dataframes.keys():
-                dataframes[point['time']] = {
+            time = tzutils.dt_from_iso(point['time'])
+            if time not in dataframes.keys():
+                dataframes[time] = {
                     'period': {
-                        'begin': point['time'],
-                        'end': utils.isotime(
-                            utils.iso2dt(point['time'])
-                            + datetime.timedelta(seconds=self._period)),
+                        'begin': time,
+                        'end': tzutils.add_delta(
+                            time, datetime.timedelta(seconds=self._period)),
                     },
                     'usage': {},
                 }
-            usage = dataframes[point['time']]['usage']
+            usage = dataframes[time]['usage']
             if point_type not in usage.keys():
                 usage[point_type] = []
             usage[point_type].append(self._point_to_dataframe_entry(point))
@@ -20,6 +20,7 @@ from oslo_log import log
 from cloudkitty import db
 from cloudkitty.storage_state import migration
 from cloudkitty.storage_state import models
+from cloudkitty import tzutils
 
 
 LOG = log.getLogger(__name__)
@@ -75,6 +76,10 @@ class StateManager(object):
 
         r = q.all()
         session.close()
+
+        for item in r:
+            item.state = tzutils.utc_to_local(item.state)
+
         return r
 
     def _get_db_item(self, session, identifier,
@@ -127,6 +132,7 @@ class StateManager(object):
         :param scope_key: scope_key associated to the scope
         :type scope_key: str
         """
+        state = tzutils.local_to_utc(state, naive=True)
         session = db.get_session()
         session.begin()
         r = self._get_db_item(
@@ -167,7 +173,7 @@ class StateManager(object):
         r = self._get_db_item(
             session, identifier, fetcher, collector, scope_key)
         session.close()
-        return r.state if r else None
+        return tzutils.utc_to_local(r.state) if r else None
 
     def init(self):
         migration.upgrade('head')
@@ -45,6 +45,7 @@ from cloudkitty import storage_state
 from cloudkitty import tests
 from cloudkitty.tests.storage.v2 import influx_utils
 from cloudkitty.tests import utils as test_utils
+from cloudkitty import tzutils
 from cloudkitty import utils as ck_utils
 
 INITIAL_TIMESTAMP = 1420070400
@@ -463,8 +464,8 @@ class InfluxStorageDataFixture(NowStorageDataFixture):
 
     def initialize_data(self):
         data = test_utils.generate_v2_storage_data(
-            start=ck_utils.get_month_start(),
-            end=ck_utils.utcnow().replace(hour=0),
+            start=tzutils.get_month_start(),
+            end=tzutils.localized_now().replace(hour=0),
         )
         self.storage.push([data])
 
@@ -4,6 +4,6 @@ fixtures:
   - NowStorageDataFixture
 
 tests:
-  - name: Can query api without auth
+  - name: Can query API without auth
     url: /v1/storage/dataframes
     status: 200
@@ -23,6 +23,7 @@ from cloudkitty import storage
 from cloudkitty import tests
 from cloudkitty.tests import samples
 from cloudkitty.tests import utils as test_utils
+from cloudkitty import tzutils
 
 
 class StorageTest(tests.TestCase):
@@ -134,8 +135,9 @@ class StorageTotalTest(StorageTest):
 
     # Total
     def test_get_empty_total(self):
-        begin = samples.FIRST_PERIOD_BEGIN - datetime.timedelta(seconds=3600)
-        end = samples.FIRST_PERIOD_BEGIN
+        begin = tzutils.utc_to_local(samples.FIRST_PERIOD_BEGIN
+                                     - datetime.timedelta(seconds=3600))
+        end = tzutils.utc_to_local(samples.FIRST_PERIOD_BEGIN)
         self.insert_data()
         total = self.storage.total(
             begin=begin,
@@ -146,8 +148,8 @@ class StorageTotalTest(StorageTest):
         self.assertEqual(end, total[0]["end"])
 
     def test_get_total_without_filter_but_timestamp(self):
-        begin = samples.FIRST_PERIOD_BEGIN
-        end = samples.SECOND_PERIOD_END
+        begin = tzutils.utc_to_local(samples.FIRST_PERIOD_BEGIN)
+        end = tzutils.utc_to_local(samples.SECOND_PERIOD_END)
         self.insert_data()
         total = self.storage.total(
             begin=begin,
@@ -159,8 +161,8 @@ class StorageTotalTest(StorageTest):
         self.assertEqual(end, total[0]["end"])
 
     def test_get_total_filtering_on_one_period(self):
-        begin = samples.FIRST_PERIOD_BEGIN
-        end = samples.FIRST_PERIOD_END
+        begin = tzutils.utc_to_local(samples.FIRST_PERIOD_BEGIN)
+        end = tzutils.utc_to_local(samples.FIRST_PERIOD_END)
         self.insert_data()
         total = self.storage.total(
             begin=begin,
@@ -171,8 +173,8 @@ class StorageTotalTest(StorageTest):
         self.assertEqual(end, total[0]["end"])
 
     def test_get_total_filtering_on_one_period_and_one_tenant(self):
-        begin = samples.FIRST_PERIOD_BEGIN
-        end = samples.FIRST_PERIOD_END
+        begin = tzutils.utc_to_local(samples.FIRST_PERIOD_BEGIN)
+        end = tzutils.utc_to_local(samples.FIRST_PERIOD_END)
         self.insert_data()
         filters = {'project_id': self._tenant_id}
         total = self.storage.total(
@@ -186,8 +188,8 @@ class StorageTotalTest(StorageTest):
         self.assertEqual(end, total[0]["end"])
 
     def test_get_total_filtering_on_service(self):
-        begin = samples.FIRST_PERIOD_BEGIN
-        end = samples.FIRST_PERIOD_END
+        begin = tzutils.utc_to_local(samples.FIRST_PERIOD_BEGIN)
+        end = tzutils.utc_to_local(samples.FIRST_PERIOD_END)
         self.insert_data()
         total = self.storage.total(
             begin=begin,
@@ -200,8 +202,8 @@ class StorageTotalTest(StorageTest):
         self.assertEqual(end, total[0]["end"])
 
     def test_get_total_groupby_tenant(self):
-        begin = samples.FIRST_PERIOD_BEGIN
-        end = samples.SECOND_PERIOD_END
+        begin = tzutils.utc_to_local(samples.FIRST_PERIOD_BEGIN)
+        end = tzutils.utc_to_local(samples.SECOND_PERIOD_END)
         self.insert_data()
         total = self.storage.total(
             begin=begin,
@@ -218,8 +220,8 @@ class StorageTotalTest(StorageTest):
         self.assertEqual(end, total[1]["end"])
 
     def test_get_total_groupby_restype(self):
-        begin = samples.FIRST_PERIOD_BEGIN
-        end = samples.SECOND_PERIOD_END
+        begin = tzutils.utc_to_local(samples.FIRST_PERIOD_BEGIN)
+        end = tzutils.utc_to_local(samples.SECOND_PERIOD_END)
         self.insert_data()
         total = self.storage.total(
             begin=begin,
@@ -236,8 +238,8 @@ class StorageTotalTest(StorageTest):
         self.assertEqual(end, total[1]["end"])
 
     def test_get_total_groupby_tenant_and_restype(self):
-        begin = samples.FIRST_PERIOD_BEGIN
-        end = samples.SECOND_PERIOD_END
+        begin = tzutils.utc_to_local(samples.FIRST_PERIOD_BEGIN)
+        end = tzutils.utc_to_local(samples.SECOND_PERIOD_END)
         self.insert_data()
         total = self.storage.total(
             begin=begin,
@@ -18,7 +18,6 @@ import functools
 from influxdb import resultset
 
 from cloudkitty.storage.v2.influx import InfluxClient
-from cloudkitty import utils
 
 
 class FakeInfluxClient(InfluxClient):
@@ -123,7 +122,7 @@ class FakeInfluxClient(InfluxClient):
 
         def __get_tag_or_field(point, key):
             if key == 'time':
-                return utils.isotime(point['time'])
+                return point['time'].isoformat()
             return point['tags'].get(key) or point['fields'].get(key)
 
         for point in points:
@@ -22,6 +22,7 @@ from cloudkitty.tests import samples
 from cloudkitty.tests.storage.v2 import influx_utils
 from cloudkitty.tests import TestCase
 from cloudkitty.tests import utils as test_utils
+from cloudkitty import tzutils
 
 
 class StorageUnitTest(TestCase):
@@ -51,13 +52,12 @@ class StorageUnitTest(TestCase):
 
     def init_data(self):
        project_ids = [self._project_id, self._other_project_id]
+        start_base = tzutils.utc_to_local(datetime.datetime(2018, 1, 1))
        for i in range(3):
-            start_delta = 3600 * i
-            end_delta = start_delta + 3600
-            start = datetime.datetime(2018, 1, 1) \
-                + datetime.timedelta(seconds=start_delta)
-            end = datetime.datetime(2018, 1, 1) \
-                + datetime.timedelta(seconds=end_delta)
+            start_delta = datetime.timedelta(seconds=3600 * i)
+            end_delta = start_delta + datetime.timedelta(seconds=3600)
+            start = tzutils.add_delta(start_base, start_delta)
+            end = tzutils.add_delta(start_base, end_delta)
             data = test_utils.generate_v2_storage_data(
                 project_ids=project_ids,
                 start=start,
@@ -15,6 +15,8 @@
 import datetime
 import decimal
 
+from dateutil import tz
+
 from cloudkitty import json_utils as json
 from cloudkitty import tests
 
@@ -26,5 +28,6 @@ class JSONEncoderTest(tests.TestCase):
         self.assertEqual(json.dumps(obj), '{"nb": 42.0}')
 
     def test_encode_datetime(self):
-        obj = {'date': datetime.datetime(2019, 1, 1)}
-        self.assertEqual(json.dumps(obj), '{"date": "2019-01-01T00:00:00Z"}')
+        obj = {'date': datetime.datetime(2019, 1, 1, tzinfo=tz.UTC)}
+        self.assertEqual(json.dumps(obj),
+                         '{"date": "2019-01-01T00:00:00+00:00"}')
@@ -14,17 +14,19 @@
 # under the License.
 #
 import datetime
 
 import mock
 from oslo_messaging import conffixture
 from stevedore import extension
+from tooz import coordination
+from tooz.drivers import file
 
 from cloudkitty import collector
 from cloudkitty import orchestrator
 from cloudkitty.storage.v2 import influx
 from cloudkitty import storage_state
 from cloudkitty import tests
-from tooz import coordination
-from tooz.drivers import file
+from cloudkitty import tzutils
+
 
 class FakeKeystoneClient(object):
@@ -79,14 +81,16 @@ class ScopeEndpointTest(tests.TestCase):
 
             sd.assert_has_calls([
                 mock.call(
-                    begin=datetime.datetime(2019, 7, 16, 8, 55, 1),
+                    begin=tzutils.utc_to_local(
+                        datetime.datetime(2019, 7, 16, 8, 55, 1)),
                     end=None,
                     filters={
                         'project_id': 'f266f30b11f246b589fd266f85eeec39',
                         'collector': 'prometheus',
                         'fetcher': 'prometheus'}),
                 mock.call(
-                    begin=datetime.datetime(2019, 7, 16, 8, 55, 1),
+                    begin=tzutils.utc_to_local(
+                        datetime.datetime(2019, 7, 16, 8, 55, 1)),
                     end=None,
                     filters={
                         'project_id': '4dfb25b0947c4f5481daf7b948c14187',
@@ -96,13 +100,15 @@ class ScopeEndpointTest(tests.TestCase):
             ss.assert_has_calls([
                 mock.call(
                     'f266f30b11f246b589fd266f85eeec39',
-                    datetime.datetime(2019, 7, 16, 8, 55, 1),
+                    tzutils.utc_to_local(
+                        datetime.datetime(2019, 7, 16, 8, 55, 1)),
                     scope_key='project_id',
                     collector='prometheus',
                     fetcher='prometheus'),
                 mock.call(
                     '4dfb25b0947c4f5481daf7b948c14187',
-                    datetime.datetime(2019, 7, 16, 8, 55, 1),
+                    tzutils.utc_to_local(
+                        datetime.datetime(2019, 7, 16, 8, 55, 1)),
                     scope_key='project_id',
                     collector='gnocchi',
                     fetcher='gnocchi')], any_order=True)

cloudkitty/tests/test_tzutils.py (new file, 103 lines)
@@ -0,0 +1,103 @@
+# Copyright 2019 Objectif Libre
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+import datetime
+import unittest
+
+from dateutil import tz
+from oslo_utils import timeutils
+
+from cloudkitty import tzutils
+from cloudkitty import utils
+
+
+class TestTZUtils(unittest.TestCase):
+
+    def setUp(self):
+        self.local_now = tzutils.localized_now()
+        self.naive_now = utils.utcnow().replace(microsecond=0)
+
+    def test_localized_now(self):
+        self.assertEqual(
+            self.local_now.astimezone(tz.UTC).replace(tzinfo=None),
+            self.naive_now)
+        self.assertIsNotNone(self.local_now.tzinfo)
+
+    def test_local_to_utc_naive(self):
+        naive_local = tzutils.local_to_utc(self.local_now, naive=True)
+        naive_naive = tzutils.local_to_utc(self.naive_now, naive=True)
+        self.assertIsNone(naive_local.tzinfo)
+        self.assertIsNone(naive_naive.tzinfo)
+        self.assertEqual(naive_local, naive_naive)
+
+    def test_local_to_utc_not_naive(self):
+        local = tzutils.local_to_utc(self.local_now)
+        naive = tzutils.local_to_utc(self.naive_now)
+        self.assertIsNotNone(local.tzinfo)
+        self.assertIsNotNone(naive.tzinfo)
+        self.assertEqual(local, naive)
+
+    def test_utc_to_local(self):
+        self.assertEqual(tzutils.utc_to_local(self.naive_now), self.local_now)
+
+    def test_dt_from_iso(self):
+        tester = '2019-06-06T16:30:54+02:00'
+        tester_utc = '2019-06-06T14:30:54+00:00'
+
+        dt = tzutils.dt_from_iso(tester)
+        self.assertIsNotNone(dt.tzinfo)
+        self.assertEqual(tzutils.dt_from_iso(tester, as_utc=True).isoformat(),
+                         tester_utc)
+
+    def _test_add_delta(self, obj, tzone):
+        delta = datetime.timedelta(seconds=3600)
+        naive = obj.astimezone(tz.UTC).replace(tzinfo=None)
+
+        self.assertEqual(
+            tzutils.add_delta(obj, delta).astimezone(tzone),
+            (naive + delta).replace(tzinfo=tz.UTC).astimezone(tzone),
+        )
+
+    def test_add_delta_summertime(self):
+        tzone = tz.gettz('Europe/Paris')
+        obj = datetime.datetime(2019, 3, 31, 1, tzinfo=tzone)
+        self._test_add_delta(obj, tzone)
+
+    def test_add_delta(self):
+        tzone = tz.gettz('Europe/Paris')
+        obj = datetime.datetime(2019, 1, 1, tzinfo=tzone)
+        self._test_add_delta(obj, tzone)
+
+    def test_get_month_start_no_arg(self):
+        naive_utc_now = timeutils.utcnow()
+        naive_month_start = datetime.datetime(
+            naive_utc_now.year, naive_utc_now.month, 1)
+        month_start = tzutils.get_month_start()
+        self.assertIsNotNone(month_start.tzinfo)
+        self.assertEqual(
+            naive_month_start,
+            month_start.replace(tzinfo=None))
+
+    def test_get_month_start_with_arg(self):
+        param = datetime.datetime(2019, 1, 3, 4, 5)
+        month_start = tzutils.get_month_start(param)
+        self.assertIsNotNone(month_start.tzinfo)
+        self.assertEqual(month_start.replace(tzinfo=None),
+                         datetime.datetime(2019, 1, 1))
+
+    def test_get_month_start_with_arg_naive(self):
+        param = datetime.datetime(2019, 1, 3, 4, 5)
+        month_start = tzutils.get_month_start(param, naive=True)
+        self.assertIsNone(month_start.tzinfo)
+        self.assertEqual(month_start, datetime.datetime(2019, 1, 1))
@@ -14,24 +14,18 @@
 # under the License.
 #
 import copy
-from datetime import datetime
 import random
 
 from oslo_utils import uuidutils
 
 from cloudkitty.tests import samples
-from cloudkitty import utils as ck_utils
 
 
 def generate_v2_storage_data(min_length=10,
                              nb_projects=2,
                              project_ids=None,
-                             start=datetime(2018, 1, 1),
-                             end=datetime(2018, 1, 1, 1)):
-    if isinstance(start, int):
-        start = ck_utils.ts2dt(start)
-    if isinstance(end, int):
-        end = ck_utils.ts2dt(end)
+                             start=None,
+                             end=None):
 
     if not project_ids:
         project_ids = [uuidutils.generate_uuid() for i in range(nb_projects)]

cloudkitty/tzutils.py (new file, 145 lines)
@@ -0,0 +1,145 @@
+# Copyright 2019 Objectif Libre
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+"""
+Timezone-related utilities
+"""
+import calendar
+import datetime
+
+from dateutil import tz
+from oslo_utils import timeutils
+
+
+_LOCAL_TZ = tz.tzlocal()
+
+
+def localized_now():
+    """Returns a datetime object with timezone information."""
+    return datetime.datetime.now().replace(tzinfo=_LOCAL_TZ, microsecond=0)
+
+
+def local_to_utc(dt, naive=False):
+    """Converts a localized datetime object to UTC.
+
+    If no tz info is provided, the object will be considered as being already
+    in UTC, and the timezone will be set to UTC.
+
+    :param dt: object to convert
+    :type dt: datetime.datetime
+    :param naive: If True, remove timezone information from the final object.
+                  Defaults to False.
+    :type naive: bool
+    :rtype: datetime.datetime
+    """
+    # NOTE(peschk_l): In python2, astimezone() raises a ValueError if it is
+    # applied to a naive datetime object. In python3 however, the naive object
+    # is considered as being in the system's time.
+    if dt.tzinfo is None:
+        dt = dt.replace(tzinfo=tz.UTC)
+
+    output = dt.astimezone(tz.UTC)
+    if naive:
+        output = output.replace(tzinfo=None)
+    return output
+
+
+def utc_to_local(dt):
+    """Converts an UTC datetime object to a localized datetime object.
+
+    If no tz info is provided, the object will be considered as being UTC.
+
+    :param dt: object to convert
+    :type dt: datetime.datetime
+    :rtype: datetime.datetime
+    """
+    if dt.tzinfo is None:
+        dt = dt.replace(tzinfo=tz.UTC)
+    return dt.astimezone(_LOCAL_TZ)
+
+
+def dt_from_iso(time_str, as_utc=False):
+    """Parses a timezone-aware datetime object from an iso8601 str.
+
+    Returns the object as being from the local timezone.
+
+    :param time_str: string to parse
+    :type time_str: str
+    :param as_utc: Return the datetime object as being from the UTC timezone
+    :type as_utc: bool
+    :rtype: datetime.datetime
+    """
+    return timeutils.parse_isotime(time_str).astimezone(
+        tz.UTC if as_utc else _LOCAL_TZ).replace(microsecond=0)
+
+
+def add_delta(dt, delta):
+    """Adds a timedelta to a datetime object.
+
+    This is done by transforming the object to a naive UTC object, adding the
+    timedelta and transforming it back to a localized object. This helps to
+    avoid cases like this when transiting from winter to summertime:
+
+    >>> dt, delta
+    (datetime.datetime(2019, 3, 31, 0, 0, tzinfo=tzlocal()),
+     datetime.timedelta(0, 3600))
+    >>> dt += delta
+    >>> dt.isoformat()
+    '2019-03-31T01:00:00+01:00'
+    >>> dt += delta
+    >>> dt.isoformat()
+    '2019-03-31T02:00:00+02:00' # This is the same time as the previous one
+    """
+    return utc_to_local(local_to_utc(dt, naive=True) + delta)
+
+
+def get_month_start(dt=None, naive=False):
+    """Returns the start of the month in the local timezone.
+
+    If no parameter is provided, returns the start of the current month. If
+    the provided parameter is naive, it will be considered as UTC and tzinfo
+    will be added, except if naive is True.
+
+    :param dt: Month to return the begin of.
+    :type dt: datetime.datetime
+    :param naive: If True, remove timezone information from the final object.
+                  Defaults to False.
+    :type naive: bool
+    :rtype: datetime.datetime
+    """
+    if not dt:
+        dt = localized_now()
+    if not dt.tzinfo:
+        dt = dt.replace(tzinfo=tz.UTC).astimezone(_LOCAL_TZ)
+    if naive:
+        dt = local_to_utc(dt, naive=True)
+    return datetime.datetime(dt.year, dt.month, 1, tzinfo=dt.tzinfo)
+
+
+def get_next_month(dt=None, naive=False):
+    """Returns the start of the next month in the local timezone.
+
+    If no parameter is provided, returns the start of the next month. If
+    the provided parameter is naive, it will be considered as UTC.
+
+    :param dt: Datetime to return the next month of.
+    :type dt: datetime.datetime
+    :param naive: If True, remove timezone information from the final object.
+                  Defaults to False.
+    :type naive: bool
+    :rtype: datetime.datetime
+    """
+    start = get_month_start(dt, naive=naive)
+    month_days = calendar.monthrange(start.year, start.month)[1]
+    return add_delta(start, datetime.timedelta(days=month_days))
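A short sketch of why add_delta() goes through naive UTC, mirroring the docstring above and test_add_delta_summertime in cloudkitty/tests/test_tzutils.py; it is illustrative only and not part of the commit.

import datetime

from dateutil import tz

from cloudkitty import tzutils

tzone = tz.gettz('Europe/Paris')
dt = datetime.datetime(2019, 3, 31, 1, tzinfo=tzone)  # just before the DST switch

# A plain "dt + timedelta" keeps the wall-clock offset, so around the switch it
# can produce a timestamp representing the same instant as the previous one.
# add_delta() converts to naive UTC, adds the delta, and localizes again, so
# exactly one hour of real time elapses.
later = tzutils.add_delta(dt, datetime.timedelta(seconds=3600))
print(later.isoformat())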
@@ -13,12 +13,6 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 #
-"""
-Time calculations functions
-
-We're mostly using oslo_utils for time calculations but we're encapsulating it
-to ease maintenance in case of library modifications.
-"""
 import calendar
 import contextlib
 import datetime
@@ -37,6 +31,8 @@ from oslo_utils import timeutils
 from six import moves
 from stevedore import extension
 
+from cloudkitty import tzutils
+
 
 _ISO8601_TIME_FORMAT_SUBSECOND = '%Y-%m-%dT%H:%M:%S.%f'
 _ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S'
@@ -126,11 +122,6 @@ def utcnow():
     return timeutils.utcnow()
 
 
-def utcnow_ts():
-    """Returns a timestamp for the current utc time."""
-    return timeutils.utcnow_ts()
-
-
 def get_month_days(dt):
     return calendar.monthrange(dt.year, dt.month)[1]
 
@@ -228,12 +219,12 @@ def check_time_state(timestamp=None, period=0, wait_periods=0):
     :rtype: datetime.datetime
     """
     if not timestamp:
-        return get_month_start()
+        return tzutils.get_month_start()
 
     period_delta = datetime.timedelta(seconds=period)
-    next_timestamp = timestamp + period_delta
+    next_timestamp = tzutils.add_delta(timestamp, period_delta)
     wait_time = wait_periods * period_delta
-    if next_timestamp + wait_time < utcnow():
+    if tzutils.add_delta(next_timestamp, wait_time) < tzutils.localized_now():
         return next_timestamp
     return None
 
@@ -0,0 +1,5 @@
+---
+upgrade:
+  - |
+    CloudKitty is now aware of timezones, and the API supports iso8601
+    formatted timestamps.
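What this note means in practice, as a hedged sketch: per the schema changes in this diff, the v2 summary 'begin'/'end' parameters and the scope-state 'state' parameter are coerced with tzutils.dt_from_iso, so iso8601 values carrying an offset are accepted and normalised. The example value comes from cloudkitty/tests/test_tzutils.py; the snippet itself is illustrative and not part of the commit.

from cloudkitty import tzutils

# An offset-aware ISO 8601 string, as a client could now send it to the API.
begin = tzutils.dt_from_iso('2019-06-06T16:30:54+02:00', as_utc=True)
print(begin.isoformat())  # 2019-06-06T14:30:54+00:00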