Introduce start and end dates on rating rules

This change affects both the hashmap and pyscripts rating modules. It
adds a time to live (start and end dates) to each rating rule. It also
replaces the deletion process: instead of removing DB rows, rules are
now marked as deleted, allowing users to know when and by whom a
specific rule was deleted. Name and description metadata were added to
enrich the rules with more context about their creation.

Change-Id: Icac45c8f3ac8b5d86a134b311de9a1a77932b003
Depends-On: https://review.opendev.org/c/openstack/cloudkitty-tempest-plugin/+/892382
Signed-off-by: Pedro Henrique <phpm13@gmail.com>
This commit is contained in:
Pedro Henrique
2023-01-09 08:52:31 -03:00
parent 44345dc26b
commit f9032687ee
40 changed files with 1245 additions and 201 deletions

View File

@@ -13,10 +13,17 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
# #
import datetime
from oslo_log import log as logging
from oslo_utils import uuidutils from oslo_utils import uuidutils
from wsme.rest.json import tojson
from wsme import types as wtypes from wsme import types as wtypes
LOG = logging.getLogger(__name__)
class UuidType(wtypes.UuidType): class UuidType(wtypes.UuidType):
"""A simple UUID type.""" """A simple UUID type."""
basetype = wtypes.text basetype = wtypes.text
@@ -29,6 +36,43 @@ class UuidType(wtypes.UuidType):
return value return value
class EndDayDatetimeBaseType(datetime.datetime):
    # Marker subclass of datetime.datetime. It adds no behavior of its own;
    # it only exists so WSME's JSON serialization can be specialized for
    # end-date values via ``tojson.when_object`` in this module.
    pass
@tojson.when_object(EndDayDatetimeBaseType)
def datetime_end_day_tojson(datatype, value):
    """Serialize an end-day datetime as its ISO-8601 string.

    ``None`` values pass through unchanged.
    """
    return None if value is None else value.isoformat()
class EndDayDatetime(wtypes.UserType):
    """Datetime API type for rule end dates.

    Date-only values (no ``T`` time part in the ISO string) are completed
    with 23:59:59 so the rule stays valid until the end of that day.
    """
    basetype = EndDayDatetimeBaseType
    name = 'end'

    def validate(self, value):
        # Already-parsed datetimes are accepted untouched.
        if isinstance(value, datetime.datetime):
            return value
        iso_date_time_separator = 'T'
        if iso_date_time_separator in value:
            LOG.debug("There is a time in the end date [%s]; "
                      "therefore, we will maintain the time, "
                      "and use the datetime as is.", value)
            return datetime.datetime.fromisoformat(value)
        LOG.debug("The end date [%s] was not defined with a specific time, "
                  "using time [23:59:59] as end time.", value)
        parsed = datetime.datetime.fromisoformat(value)
        # Rebuild the datetime with the last second of the given day.
        return datetime.datetime(
            year=parsed.year, month=parsed.month, day=parsed.day,
            hour=23, minute=59, second=59)
# Code taken from ironic types # Code taken from ironic types
class MultiType(wtypes.UserType): class MultiType(wtypes.UserType):
"""A complex type that represents one or more types. """A complex type that represents one or more types.

View File

@@ -13,10 +13,13 @@
# under the License. # under the License.
# #
import logging import logging
import pecan
import requests import requests
from keystoneauth1 import loading as ks_loading
from keystoneauth1 import session as ks_session from keystoneauth1 import session as ks_session
from keystoneclient.v3 import client as ks_client
from oslo_config import cfg
LOG = logging.getLogger(__name__) LOG = logging.getLogger(__name__)
@@ -30,3 +33,26 @@ def create_custom_session(session_options, pool_size):
pool_maxsize=pool_size) pool_maxsize=pool_size)
return ks_session.Session(session=session, **session_options) return ks_session.Session(session=session, **session_options)
def get_request_user():
    """Return the user ID of the current API request.

    Reads the ``X-Auth-Token`` header of the current pecan request and
    resolves it through Keystone. Returns the string ``'unknown'`` when no
    token is present.

    :return: the user ID extracted from the token data, or ``'unknown'``.
    """
    # Check the token first: the original code created a Keystone session
    # before this check, leaking it on the early return.
    keystone_token = pecan.request.headers.get('X-Auth-Token')
    if not keystone_token:
        LOG.debug("There is no auth token in the request header, using "
                  "'unknown' as the request user.")
        return 'unknown'
    conf = cfg.CONF
    ks_auth = ks_loading.load_auth_from_conf_options(
        conf, 'keystone_authtoken')
    session = create_custom_session(
        {'auth': ks_auth, 'verify': False}, 1)
    try:
        keystone_client = ks_client.Client(
            session=session,
            interface=conf['keystone_authtoken'].interface)
        token_data = ks_client.tokens.TokenManager(
            keystone_client).get_token_data(
            keystone_token)
    finally:
        # Always release the underlying requests session, even when the
        # Keystone lookup fails.
        session.session.close()
    return token_data['token']['user']['id']

View File

@@ -292,9 +292,21 @@ class Worker(BaseWorker):
self._state = state.StateManager() self._state = state.StateManager()
self.next_timestamp_to_process = functools.partial( self.next_timestamp_to_process = functools.partial(
_check_state, self, self._period, self._tenant_id) _check_state, self, self._period, self._tenant_id)
self.refresh_rating_rules()
super(Worker, self).__init__(self._tenant_id) super(Worker, self).__init__(self._tenant_id)
def refresh_rating_rules(self):
    """Reload the rating rules of every configured processor."""
    for processor in self._processors:
        module = processor.obj
        processing_date = self.next_timestamp_to_process()
        module.reload_config(processing_date)
        # Hashmap modules expose `_entries`; pyscripts expose `_script`.
        if hasattr(module, '_entries'):
            rules = module._entries
        else:
            rules = getattr(module, '_script', None)
        LOG.debug("Reloading rating rules for processor [%s]"
                  " and scope [%s] at [%s] using rules [%s]",
                  module.module_name,
                  self._tenant_id, processing_date, rules)
def _collect(self, metric, start_timestamp): def _collect(self, metric, start_timestamp):
next_timestamp = tzutils.add_delta( next_timestamp = tzutils.add_delta(
start_timestamp, timedelta(seconds=self._period)) start_timestamp, timedelta(seconds=self._period))
@@ -376,6 +388,7 @@ class Worker(BaseWorker):
def run(self): def run(self):
should_continue_processing = self.execute_worker_processing() should_continue_processing = self.execute_worker_processing()
while should_continue_processing: while should_continue_processing:
self.refresh_rating_rules()
should_continue_processing = self.execute_worker_processing() should_continue_processing = self.execute_worker_processing()
def execute_worker_processing(self): def execute_worker_processing(self):

View File

@@ -123,7 +123,7 @@ class RatingProcessorBase(object, metaclass=abc.ABCMeta):
""" """
@abc.abstractmethod @abc.abstractmethod
def reload_config(self): def reload_config(self, start=None):
"""Trigger configuration reload """Trigger configuration reload
""" """

View File

@@ -0,0 +1,66 @@
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import datetime
from wsme import types as wtypes
from cloudkitty.api.v1 import types as ck_types
class VolatileAuditableModel(wtypes.Base):
    """Base API datamodel with audit and validity-period attributes.

    Shared by rating rule datamodels to expose when a rule is valid
    (``start``/``end``) and who created, updated or deleted it.
    """

    created_at = wtypes.wsattr(datetime.datetime, mandatory=False,
                               default=None)
    """The date the rule was created."""

    start = wtypes.wsattr(datetime.datetime, mandatory=False, default=None)
    """Must be None or a date in the future. To set a date in the past,
    use the force parameter in the POST query."""

    end = wtypes.wsattr(ck_types.EndDayDatetime(), mandatory=False,
                        default=None)
    """Must be None or a date in the future. To set a date in the past,
    use the force parameter in the POST query."""

    name = wtypes.wsattr(wtypes.text, mandatory=False, default=None)
    """The name of the rule."""

    description = wtypes.wsattr(wtypes.text, mandatory=False, default=None)
    """The description of the rule."""

    deleted = wtypes.wsattr(datetime.datetime, mandatory=False, default=None)
    """The date the rule was deleted."""

    created_by = wtypes.wsattr(wtypes.text, mandatory=False, default=None)
    """The id of the user who created the rule."""

    updated_by = wtypes.wsattr(wtypes.text, mandatory=False, default=None)
    """The id of the user who last updated the rule."""

    deleted_by = wtypes.wsattr(wtypes.text, mandatory=False, default=None)
    """The id of the user who deleted the rule."""

    @classmethod
    def sample(cls):
        """Return an example instance used by the API documentation."""
        sample = cls(created_at=datetime.datetime(2023, 1, 1, 10, 10, 10),
                     start=datetime.datetime(2023, 2, 1),
                     end=datetime.datetime(2023, 3, 1),
                     name='rule 1',
                     description='description',
                     deleted=datetime.datetime(2023, 1, 15),
                     created_by='7977999e2e2511e6a8b2df30b233ffcb',
                     updated_by='7977999e2e2511e6a8b2df30b233ffcb',
                     deleted_by='7977999e2e2511e6a8b2df30b233ffcb')
        return sample

View File

View File

@@ -0,0 +1,53 @@
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import datetime
import sqlalchemy as sa
def get_filters(query, model, deleted=False, start=None, end=None,
                updated_by=None, created_by=None, deleted_by=None,
                description=None, is_active=None):
    """Apply the common audit/validity filters of rating rules to a query.

    :param query: the SQLAlchemy query to refine.
    :param model: the mapped model holding the audit columns.
    :param deleted: also include soft-deleted rows when truthy.
    :param start: keep rows whose ``start`` is at or after this date.
    :param end: keep rows whose ``end`` is strictly before this date.
    :param updated_by: filter on the user who last updated the row.
    :param created_by: filter on the user who created the row.
    :param deleted_by: filter on the user who deleted the row.
    :param description: substring match (case-insensitive) on description.
    :param is_active: when truthy, keep only rows valid "now"; a datetime
                      value is used as the reference time instead of now.
    :return: the filtered query.
    """
    if is_active:
        reference_time = (is_active if not isinstance(is_active, bool)
                          else datetime.datetime.now())
        query = (query
                 .filter(model.start <= reference_time)
                 .filter(sa.or_(model.end > reference_time,
                                model.end == sa.null()))
                 .filter(model.deleted == sa.null()))
    if description:
        query = query.filter(model.description.ilike(f'%{description}%'))
    for column, wanted in ((model.deleted_by, deleted_by),
                           (model.created_by, created_by),
                           (model.updated_by, updated_by)):
        if wanted:
            query = query.filter(column == wanted)
    if not deleted:
        query = query.filter(model.deleted == sa.null())
    if start:
        query = query.filter(model.start >= start)
    if end:
        query = query.filter(model.end < end)
    return query

View File

@@ -0,0 +1,63 @@
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import sqlalchemy as sa
def create_common_tables(batch_op):
    """Add the shared audit/validity columns to a table in a migration.

    :param batch_op: the Alembic batch-operations handle of the table.
    """
    # `created_at` and `start` default to the DB-side current timestamp so
    # pre-existing rows get a value when the column is added.
    audit_columns = (
        sa.Column('created_at', sa.DateTime(), nullable=False,
                  server_default=sa.sql.func.now()),
        sa.Column('start', sa.DateTime(), nullable=False,
                  server_default=sa.sql.func.now()),
        sa.Column('end', sa.DateTime(), nullable=True),
        sa.Column('description', sa.Text(length=256), nullable=True),
        sa.Column('deleted', sa.DateTime(), nullable=True),
        sa.Column('created_by', sa.String(length=32), nullable=False),
        sa.Column('updated_by', sa.String(length=32), nullable=True),
        sa.Column('deleted_by', sa.String(length=32), nullable=True),
    )
    for column in audit_columns:
        batch_op.add_column(column)

View File

@@ -0,0 +1,60 @@
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import datetime
import sqlalchemy
class VolatileAuditableModel:
    """SQLAlchemy mixin adding audit and validity-period columns.

    Shared by the rating rule tables so each rule records when it is valid
    (``start``/``end``) and who created, updated or deleted it.
    """
    created_at = sqlalchemy.Column(
        'created_at',
        sqlalchemy.DateTime(),
        nullable=False,
        # Pass the callable (not its result): `datetime.datetime.now()`
        # would be evaluated once at import time, freezing the same
        # timestamp into every row ever inserted.
        default=datetime.datetime.now
    )
    start = sqlalchemy.Column(
        'start',
        sqlalchemy.DateTime(),
        nullable=False,
        # Callable default, evaluated per-row at insert time (see above).
        default=datetime.datetime.now
    )
    # End of the validity period; NULL means the rule never expires.
    end = sqlalchemy.Column(
        'end',
        sqlalchemy.DateTime(),
        nullable=True)
    name = sqlalchemy.Column(
        'name',
        sqlalchemy.String(length=32),
        nullable=False)
    description = sqlalchemy.Column(
        'description',
        sqlalchemy.String(length=256),
        nullable=True)
    # Soft-delete marker: NULL means the row is not deleted.
    deleted = sqlalchemy.Column(
        'deleted',
        sqlalchemy.DateTime(),
        nullable=True)
    created_by = sqlalchemy.Column(
        'created_by',
        sqlalchemy.String(length=32),
        nullable=False)
    updated_by = sqlalchemy.Column(
        'updated_by',
        sqlalchemy.String(length=32),
        nullable=True)
    deleted_by = sqlalchemy.Column(
        'deleted_by',
        sqlalchemy.String(length=32),
        nullable=True)

View File

@@ -0,0 +1,87 @@
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import datetime
import pecan
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
def _resource_changed(current_resource, resource, ignore_attribute_check=None):
if not ignore_attribute_check:
ignore_attribute_check = []
resource_attributes = resource._wsme_attributes
for attribute in resource_attributes:
new_value = getattr(resource, attribute.key, None)
old_value = getattr(current_resource, attribute.key, None)
if attribute.key in ignore_attribute_check:
continue
if new_value and new_value != old_value:
return True
return False
def validate_update_allowing_only_end_date(current_resource, resource):
    """Validate an update request against the rule's lifecycle state.

    Once a rule has started (``start`` is in the past), only its ``end``
    date may still be changed, and only to a future date; a started rule
    that already has an ``end`` date cannot be updated at all. Rules that
    have not started yet can be updated freely.

    :param current_resource: the rule as currently stored.
    :param resource: the requested new state of the rule; may be mutated
                     (``name`` cleared, ``start`` backfilled).
    :return: True when only the ``end`` attribute should be persisted,
             False when a full update is allowed.
    :raises: HTTP 400 via ``pecan.abort`` when the update is not allowed.
    """
    now = datetime.datetime.now()
    if current_resource.start < now:
        if current_resource.end is None:
            # Running rule with no end date: only `end` may change.
            if _resource_changed(current_resource, resource,
                                 ignore_attribute_check=["end"]):
                pecan.abort(
                    400, f'You are allowed to update '
                    f'only the attribute [end] as this rule is '
                    f'already running as it started on '
                    f'[{current_resource.start}]')
            if resource.end and resource.end < now:
                pecan.abort(
                    400, f'End date must be in the future. '
                    f'end=[{resource.end}] current time=[{now}]')
            return True
        # Running rule that already has an end date: immutable.
        pecan.abort(
            400, 'Cannot update a rule that was already processed and '
            'has a defined end date.')
    else:
        LOG.debug("Updating the rating rule [%s] with new data [%s] as it has "
                  "not been used yet.", current_resource, resource)
        # NOTE(review): the name is cleared here, presumably so an update
        # never renames an existing rule — confirm this intent.
        resource.name = None
        if not resource.start:
            resource.start = current_resource.start
        validate_resource(resource)
        return False
def validate_resource(resource, force=False):
    """Validate the start/end dates of a rating rule.

    :param resource: the rule to validate; only its ``start`` and ``end``
                     attributes are read.
    :param force: allow a start date in the past (the decision is logged).
    :raises: HTTP 400 via ``pecan.abort`` when the dates are invalid.
    """
    start = resource.start
    end = resource.end
    now = datetime.datetime.now()
    if not force and start and start < now:
        pecan.abort(
            400, f'Cannot create a rule with start in the past. '
            f'start=[{start}] current time=[{now}].')
    # Guard on `start` as well: comparing `end < None` would raise a
    # TypeError instead of producing a clean validation result.
    if end and start and end < start:
        pecan.abort(
            400, f'Cannot create a rule with start after end. '
            f'start=[{start}] end=[{end}].')
    if force:
        LOG.info("Creating resource [%s] at [%s] with start date [%s] using "
                 "the flag 'Force'.", resource, now, start)

View File

@@ -41,11 +41,11 @@ class HashMap(rating.RatingProcessorBase):
self._res = {} self._res = {}
self._load_rates() self._load_rates()
def reload_config(self): def reload_config(self, start=None):
"""Reload the module's configuration. """Reload the module's configuration.
""" """
self._load_rates() self._load_rates(start)
def _load_mappings(self, mappings_uuid_list): def _load_mappings(self, mappings_uuid_list):
hashmap = hash_db_api.get_instance() hashmap = hash_db_api.get_instance()
@@ -93,13 +93,15 @@ class HashMap(rating.RatingProcessorBase):
root, root,
service_uuid=None, service_uuid=None,
field_uuid=None, field_uuid=None,
tenant_uuid=None): tenant_uuid=None,
start=None):
hashmap = hash_db_api.get_instance() hashmap = hash_db_api.get_instance()
list_func = getattr(hashmap, 'list_{}'.format(entry_type)) list_func = getattr(hashmap, 'list_{}'.format(entry_type))
entries_uuid_list = list_func( entries_uuid_list = list_func(
service_uuid=service_uuid, service_uuid=service_uuid,
field_uuid=field_uuid, field_uuid=field_uuid,
tenant_uuid=tenant_uuid) tenant_uuid=tenant_uuid,
is_active=start or True)
load_func = getattr(self, '_load_{}'.format(entry_type)) load_func = getattr(self, '_load_{}'.format(entry_type))
entries = load_func(entries_uuid_list) entries = load_func(entries_uuid_list)
if entry_type in root: if entry_type in root:
@@ -112,7 +114,7 @@ class HashMap(rating.RatingProcessorBase):
else: else:
root[entry_type] = entries root[entry_type] = entries
def _load_service_entries(self, service_name, service_uuid): def _load_service_entries(self, service_name, service_uuid, start=None):
self._entries[service_name] = dict() self._entries[service_name] = dict()
for entry_type in ('mappings', 'thresholds'): for entry_type in ('mappings', 'thresholds'):
for tenant in (None, self._tenant_id): for tenant in (None, self._tenant_id):
@@ -120,9 +122,11 @@ class HashMap(rating.RatingProcessorBase):
entry_type, entry_type,
self._entries[service_name], self._entries[service_name],
service_uuid=service_uuid, service_uuid=service_uuid,
tenant_uuid=tenant) tenant_uuid=tenant,
start=start)
def _load_field_entries(self, service_name, field_name, field_uuid): def _load_field_entries(self, service_name, field_name, field_uuid,
start=None):
if service_name not in self._entries: if service_name not in self._entries:
self._entries[service_name] = {} self._entries[service_name] = {}
if 'fields' not in self._entries[service_name]: if 'fields' not in self._entries[service_name]:
@@ -134,21 +138,23 @@ class HashMap(rating.RatingProcessorBase):
entry_type, entry_type,
scope, scope,
field_uuid=field_uuid, field_uuid=field_uuid,
tenant_uuid=tenant) tenant_uuid=tenant,
start=start)
def _load_rates(self): def _load_rates(self, start=None):
self._entries = {} self._entries = {}
hashmap = hash_db_api.get_instance() hashmap = hash_db_api.get_instance()
services_uuid_list = hashmap.list_services() services_uuid_list = hashmap.list_services()
for service_uuid in services_uuid_list: for service_uuid in services_uuid_list:
service_db = hashmap.get_service(uuid=service_uuid) service_db = hashmap.get_service(uuid=service_uuid)
service_name = service_db.name service_name = service_db.name
self._load_service_entries(service_name, service_uuid) self._load_service_entries(service_name, service_uuid, start)
fields_uuid_list = hashmap.list_fields(service_uuid) fields_uuid_list = hashmap.list_fields(service_uuid)
for field_uuid in fields_uuid_list: for field_uuid in fields_uuid_list:
field_db = hashmap.get_field(uuid=field_uuid) field_db = hashmap.get_field(uuid=field_uuid)
field_name = field_db.name field_name = field_db.name
self._load_field_entries(service_name, field_name, field_uuid) self._load_field_entries(service_name, field_name, field_uuid,
start)
def add_rating_informations(self, point): def add_rating_informations(self, point):
for entry in self._res.values(): for entry in self._res.values():

View File

@@ -13,11 +13,14 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
# #
import datetime
import pecan import pecan
import wsmeext.pecan as wsme_pecan import wsmeext.pecan as wsme_pecan
from cloudkitty.api.v1 import types as ck_types from cloudkitty.api.v1 import types as ck_types
from cloudkitty.common.custom_session import get_request_user
from cloudkitty import rating from cloudkitty import rating
from cloudkitty.rating.common.validations import fields as field_validations
from cloudkitty.rating.hash.datamodels import group as group_models from cloudkitty.rating.hash.datamodels import group as group_models
from cloudkitty.rating.hash.datamodels import mapping as mapping_models from cloudkitty.rating.hash.datamodels import mapping as mapping_models
from cloudkitty.rating.hash.db import api as db_api from cloudkitty.rating.hash.db import api as db_api
@@ -53,6 +56,15 @@ class HashMapMappingsController(rating.RatingRestControllerBase):
bool, bool,
ck_types.UuidType(), ck_types.UuidType(),
bool, bool,
bool,
datetime.datetime,
datetime.datetime,
str,
str,
str,
str,
bool,
bool,
status_code=200) status_code=200)
def get_all(self, def get_all(self,
service_id=None, service_id=None,
@@ -60,7 +72,16 @@ class HashMapMappingsController(rating.RatingRestControllerBase):
group_id=None, group_id=None,
no_group=False, no_group=False,
tenant_id=None, tenant_id=None,
filter_tenant=False): filter_tenant=False,
deleted=False,
start=None,
end=None,
updated_by=None,
created_by=None,
deleted_by=None,
description=None,
is_active=None,
all=True):
"""Get the mapping list """Get the mapping list
:param service_id: Service UUID to filter on. :param service_id: Service UUID to filter on.
@@ -71,6 +92,15 @@ class HashMapMappingsController(rating.RatingRestControllerBase):
:param filter_tenant: Explicitly filter on tenant (default is to not :param filter_tenant: Explicitly filter on tenant (default is to not
filter on tenant). Useful if you want to filter filter on tenant). Useful if you want to filter
on tenant being None. on tenant being None.
:param deleted: Show deleted mappings.
:param start: Mappings with start after date.
:param end: Mappings with end before date.
:param updated_by: user uuid to filter on.
:param created_by: user uuid to filter on.
:param deleted_by: user uuid to filter on.
:param description: mapping that contains the text in description.
:param is_active: only active mappings.
:param: all: list all rules.
:return: List of every mappings. :return: List of every mappings.
""" """
hashmap = db_api.get_instance() hashmap = db_api.get_instance()
@@ -83,6 +113,15 @@ class HashMapMappingsController(rating.RatingRestControllerBase):
field_uuid=field_id, field_uuid=field_id,
group_uuid=group_id, group_uuid=group_id,
no_group=no_group, no_group=no_group,
deleted=deleted,
start=start,
end=end,
updated_by=updated_by,
created_by=created_by,
deleted_by=deleted_by,
description=description,
is_active=is_active,
all=all,
**search_opts) **search_opts)
for mapping_uuid in mappings_uuid_list: for mapping_uuid in mappings_uuid_list:
mapping_db = hashmap.get_mapping(uuid=mapping_uuid) mapping_db = hashmap.get_mapping(uuid=mapping_uuid)
@@ -107,15 +146,20 @@ class HashMapMappingsController(rating.RatingRestControllerBase):
pecan.abort(404, e.args[0]) pecan.abort(404, e.args[0])
@wsme_pecan.wsexpose(mapping_models.Mapping, @wsme_pecan.wsexpose(mapping_models.Mapping,
bool,
body=mapping_models.Mapping, body=mapping_models.Mapping,
status_code=201) status_code=201)
def post(self, mapping_data): def post(self, force=False, mapping_data=None):
"""Create a mapping. """Create a mapping.
:param force: Allows start and end in the past.
:param mapping_data: Informations about the mapping to create. :param mapping_data: Informations about the mapping to create.
""" """
hashmap = db_api.get_instance() hashmap = db_api.get_instance()
field_validations.validate_resource(
mapping_data, force=force)
try: try:
created_by = get_request_user()
mapping_db = hashmap.create_mapping( mapping_db = hashmap.create_mapping(
value=mapping_data.value, value=mapping_data.value,
map_type=mapping_data.map_type, map_type=mapping_data.map_type,
@@ -123,7 +167,12 @@ class HashMapMappingsController(rating.RatingRestControllerBase):
field_id=mapping_data.field_id, field_id=mapping_data.field_id,
group_id=mapping_data.group_id, group_id=mapping_data.group_id,
service_id=mapping_data.service_id, service_id=mapping_data.service_id,
tenant_id=mapping_data.tenant_id) tenant_id=mapping_data.tenant_id,
created_by=created_by,
start=mapping_data.start,
end=mapping_data.end,
name=mapping_data.name,
description=mapping_data.description)
pecan.response.location = pecan.request.path_url pecan.response.location = pecan.request.path_url
if pecan.response.location[-1] != '/': if pecan.response.location[-1] != '/':
pecan.response.location += '/' pecan.response.location += '/'
@@ -147,14 +196,23 @@ class HashMapMappingsController(rating.RatingRestControllerBase):
""" """
hashmap = db_api.get_instance() hashmap = db_api.get_instance()
try: try:
hashmap.update_mapping( updated_by = get_request_user()
mapping_id, current_mapping = hashmap.get_mapping(mapping_id)
mapping_id=mapping.mapping_id, if field_validations.validate_update_allowing_only_end_date(
value=mapping.value, current_mapping,
cost=mapping.cost, mapping):
map_type=mapping.map_type, hashmap.update_mapping(
group_id=mapping.group_id, mapping_id,
tenant_id=mapping.tenant_id) end=mapping.end,
updated_by=updated_by)
else:
hashmap.update_mapping(
mapping_id,
cost=mapping.cost,
start=mapping.start,
end=mapping.end,
updated_by=updated_by,
description=mapping.description)
pecan.response.headers['Location'] = pecan.request.path pecan.response.headers['Location'] = pecan.request.path
except db_api.MappingAlreadyExists as e: except db_api.MappingAlreadyExists as e:
pecan.abort(409, e.args[0]) pecan.abort(409, e.args[0])
@@ -172,7 +230,8 @@ class HashMapMappingsController(rating.RatingRestControllerBase):
:param mapping_id: UUID of the mapping to delete. :param mapping_id: UUID of the mapping to delete.
""" """
hashmap = db_api.get_instance() hashmap = db_api.get_instance()
deleted_by = get_request_user()
try: try:
hashmap.delete_mapping(uuid=mapping_id) hashmap.delete_mapping(mapping_id, deleted_by=deleted_by)
except db_api.NoSuchMapping as e: except db_api.NoSuchMapping as e:
pecan.abort(404, e.args[0]) pecan.abort(404, e.args[0])

View File

@@ -18,11 +18,12 @@ import decimal
from wsme import types as wtypes from wsme import types as wtypes
from cloudkitty.api.v1 import types as ck_types from cloudkitty.api.v1 import types as ck_types
from cloudkitty.rating.common.datamodels.models import VolatileAuditableModel
MAP_TYPE = wtypes.Enum(wtypes.text, 'flat', 'rate') MAP_TYPE = wtypes.Enum(wtypes.text, 'flat', 'rate')
class Mapping(wtypes.Base): class Mapping(VolatileAuditableModel):
"""Type describing a Mapping. """Type describing a Mapping.
A mapping is used to apply rating rules based on a value, if the parent is A mapping is used to apply rating rules based on a value, if the parent is
@@ -59,12 +60,14 @@ class Mapping(wtypes.Base):
@classmethod @classmethod
def sample(cls): def sample(cls):
sample = super().sample()
sample = cls(mapping_id='39dbd39d-f663-4444-a795-fb19d81af136', sample = cls(mapping_id='39dbd39d-f663-4444-a795-fb19d81af136',
field_id='ac55b000-a05b-4832-b2ff-265a034886ab', field_id='ac55b000-a05b-4832-b2ff-265a034886ab',
value='m1.micro', value='m1.micro',
map_type='flat', map_type='flat',
cost=decimal.Decimal('4.2'), cost=decimal.Decimal('4.2'),
tenant_id='7977999e-2e25-11e6-a8b2-df30b233ffcb') tenant_id='7977999e-2e25-11e6-a8b2-df30b233ffcb',
**sample.__dict__)
return sample return sample

View File

@@ -235,6 +235,13 @@ class HashMap(object, metaclass=abc.ABCMeta):
:param uuid: UUID of the mapping to get. :param uuid: UUID of the mapping to get.
""" """
@abc.abstractmethod
def get_mapping_by_name(self, name):
"""Return a mapping object.
:param name: name of the mapping to get.
"""
@abc.abstractmethod @abc.abstractmethod
def get_threshold(self, uuid): def get_threshold(self, uuid):
"""Return a threshold object. """Return a threshold object.
@@ -327,7 +334,12 @@ class HashMap(object, metaclass=abc.ABCMeta):
service_id=None, service_id=None,
field_id=None, field_id=None,
group_id=None, group_id=None,
tenant_id=None): tenant_id=None,
start=None,
end=None,
name=None,
description=None,
created_by=None):
"""Create a new service/field mapping. """Create a new service/field mapping.
:param cost: Rating value to apply to this mapping. :param cost: Rating value to apply to this mapping.
@@ -337,6 +349,11 @@ class HashMap(object, metaclass=abc.ABCMeta):
:param field_id: Field the mapping is applying to. :param field_id: Field the mapping is applying to.
:param group_id: The group of calculations to apply. :param group_id: The group of calculations to apply.
:param tenant_id: The tenant to apply calculations to. :param tenant_id: The tenant to apply calculations to.
:param start: The date the rule will start to be valid.
:param end: The date the rule will stop to be valid.
:param name: The rule name.
:param description: The rule description
:param created_by: The user who created the rule.
""" """
@abc.abstractmethod @abc.abstractmethod
@@ -404,10 +421,11 @@ class HashMap(object, metaclass=abc.ABCMeta):
""" """
@abc.abstractmethod @abc.abstractmethod
def delete_mapping(self, uuid): def delete_mapping(self, uuid, deleted_by=None):
"""Delete a mapping """Delete a mapping
:param uuid: UUID of the mapping to delete. :param uuid: UUID of the mapping to delete.
:param deleted_by: UUID of the user who deleted the mapping.
""" """
@abc.abstractmethod @abc.abstractmethod
def delete_threshold(self, uuid): def delete_threshold(self, uuid):

View File

@@ -0,0 +1,70 @@
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Add start end dates and audit in hashmap mappings
Revision ID: 8a591f85865f
Revises: 4e0232ce
Create Date: 2023-03-06 14:22:00.000000
"""
from alembic import op
from cloudkitty import db
from cloudkitty.rating.common.db.migrations import create_common_tables
from cloudkitty.rating.hash.db.sqlalchemy import models
import datetime
import uuid
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '8a591f85865f'
down_revision = '4e0232ce'
def _update_start_date():
    """Backfill the new mandatory columns on pre-existing mappings.

    Every existing mapping gets the epoch as start date (so it stays
    valid for all past processing), a generated unique name, and
    'migration' as the creating user.
    """
    # Timestamp zero.
    initial_start_date = datetime.datetime(year=1970, month=1, day=1,
                                           tzinfo=datetime.timezone.utc)
    with db.session_for_write() as session:
        q = session.query(models.HashMapMapping)
        mapping_db = q.with_for_update().all()
        for entry in mapping_db:
            entry.start = initial_start_date
            # uuid4().hex is already dash-free (32 hex chars, fitting the
            # String(32) column), so no extra cleanup is needed.
            entry.name = uuid.uuid4().hex
            entry.created_by = 'migration'
entry.created_by = 'migration'
def upgrade():
    """Add start/end dates and audit columns to ``hashmap_mappings``.

    Drops the uniqueness constraints (rows are now soft-deleted instead
    of removed, so uniqueness is enforced by the application), adds the
    ``name`` column plus the shared audit columns, then backfills the new
    mandatory columns for pre-existing rows.
    """
    table_name = 'hashmap_mappings'
    with op.batch_alter_table(
            table_name) as batch_op:
        # As rows are no longer deleted, the constraint
        # validations are delegated to the application.
        batch_op.drop_constraint(
            'uniq_field_mapping',
            type_='unique')
        batch_op.drop_constraint(
            'uniq_service_mapping',
            type_='unique')
        # NOTE(review): adding a NOT NULL column without a server default
        # may fail on some backends when the table already has rows; the
        # backfill below runs only after the ALTER — confirm this works on
        # all supported databases.
        batch_op.add_column(
            sa.Column(
                'name',
                sa.String(length=32),
                nullable=False))
        create_common_tables(batch_op)
    _update_start_date()

View File

@@ -13,12 +13,15 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
# #
import datetime
from oslo_db import exception from oslo_db import exception
from oslo_db.sqlalchemy import utils from oslo_db.sqlalchemy import utils
from oslo_utils import uuidutils from oslo_utils import uuidutils
import sqlalchemy import sqlalchemy
from cloudkitty import db from cloudkitty import db
from cloudkitty.rating.common.db.filters import get_filters
from cloudkitty.rating.hash.db import api from cloudkitty.rating.hash.db import api
from cloudkitty.rating.hash.db.sqlalchemy import migration from cloudkitty.rating.hash.db.sqlalchemy import migration
from cloudkitty.rating.hash.db.sqlalchemy import models from cloudkitty.rating.hash.db.sqlalchemy import models
@@ -99,6 +102,18 @@ class HashMap(api.HashMap):
except sqlalchemy.orm.exc.NoResultFound: except sqlalchemy.orm.exc.NoResultFound:
raise api.NoSuchMapping(uuid) raise api.NoSuchMapping(uuid)
def get_mapping_by_name(self, name):
    """Return the non-deleted mappings matching the given rule name.

    :param name: name of the mapping(s) to look up.
    :return: list of HashMapMapping entries with that name.
    :raises api.NoSuchMapping: if no non-deleted mapping has that name.
    """
    with db.session_for_read() as session:
        q = session.query(models.HashMapMapping)
        q = q.filter(
            models.HashMapMapping.deleted == sqlalchemy.null(),
            models.HashMapMapping.name == name)
        res = q.all()
        # Query.all() returns a (possibly empty) list and never raises
        # NoResultFound, so the previous try/except was unreachable; an
        # empty result must be checked explicitly to honor the
        # NoSuchMapping contract.
        if not res:
            raise api.NoSuchMapping(name)
        return res
def get_threshold(self, uuid): def get_threshold(self, uuid):
with db.session_for_read() as session: with db.session_for_read() as session:
try: try:
@@ -165,6 +180,7 @@ class HashMap(api.HashMap):
field_uuid=None, field_uuid=None,
group_uuid=None, group_uuid=None,
no_group=False, no_group=False,
all=False,
**kwargs): **kwargs):
with db.session_for_read() as session: with db.session_for_read() as session:
@@ -178,13 +194,13 @@ class HashMap(api.HashMap):
q = q.join( q = q.join(
models.HashMapMapping.field) models.HashMapMapping.field)
q = q.filter(models.HashMapField.field_id == field_uuid) q = q.filter(models.HashMapField.field_id == field_uuid)
elif not service_uuid and not field_uuid and not group_uuid: elif not (service_uuid or field_uuid or group_uuid) and not all:
raise api.ClientHashMapError( raise api.ClientHashMapError(
'You must specify either service_uuid,' 'You must specify either service_uuid,'
' field_uuid or group_uuid.') ' field_uuid or group_uuid.')
if 'tenant_uuid' in kwargs: if 'tenant_uuid' in kwargs:
q = q.filter( q = q.filter(
models.HashMapMapping.tenant_id == kwargs.get( models.HashMapMapping.tenant_id == kwargs.pop(
'tenant_uuid')) 'tenant_uuid'))
if group_uuid: if group_uuid:
q = q.join( q = q.join(
@@ -192,6 +208,8 @@ class HashMap(api.HashMap):
q = q.filter(models.HashMapGroup.group_id == group_uuid) q = q.filter(models.HashMapGroup.group_id == group_uuid)
elif no_group: elif no_group:
q = q.filter(models.HashMapMapping.group_id == None) # noqa q = q.filter(models.HashMapMapping.group_id == None) # noqa
q = get_filters(q, models.HashMapMapping, **kwargs)
res = q.values( res = q.values(
models.HashMapMapping.mapping_id) models.HashMapMapping.mapping_id)
return [uuid[0] for uuid in res] return [uuid[0] for uuid in res]
@@ -282,7 +300,14 @@ class HashMap(api.HashMap):
service_id=None, service_id=None,
field_id=None, field_id=None,
group_id=None, group_id=None,
tenant_id=None): tenant_id=None,
start=None,
end=None,
name=None,
description=None,
created_by=None):
if not name:
name = uuidutils.generate_uuid(False)
if field_id and service_id: if field_id and service_id:
raise api.ClientHashMapError('You can only specify one parent.') raise api.ClientHashMapError('You can only specify one parent.')
elif not service_id and not field_id: elif not service_id and not field_id:
@@ -295,6 +320,11 @@ class HashMap(api.HashMap):
raise api.ClientHashMapError( raise api.ClientHashMapError(
'You must specify a value' 'You must specify a value'
' for a field mapping.') ' for a field mapping.')
created_at = datetime.datetime.now()
if not start:
start = created_at
field_fk = None field_fk = None
if field_id: if field_id:
field_db = self.get_field(uuid=field_id) field_db = self.get_field(uuid=field_id)
@@ -309,14 +339,89 @@ class HashMap(api.HashMap):
group_fk = group_db.id group_fk = group_db.id
try: try:
with db.session_for_write() as session: with db.session_for_write() as session:
field_map = models.HashMapMapping( map_model = models.HashMapMapping
q = session.query(map_model)
or_ = sqlalchemy.or_
and_ = sqlalchemy.and_
null_ = sqlalchemy.null()
filter_name = None
filter_value = None
if field_fk:
filter_name = map_model.field_id
filter_value = field_fk
if service_fk:
filter_name = map_model.service_id
filter_value = service_fk
q = q.filter(
map_model.deleted == null_)
if end:
date_filter = and_(
map_model.start < end,
or_(
map_model.end >= start,
map_model.end == null_
)
)
else:
date_filter = or_(
map_model.start > start,
map_model.end == null_
)
if filter_name:
name_filter = or_(
map_model.name == name,
sqlalchemy.and_(
date_filter,
and_(
map_model.value == value,
map_model.tenant_id == tenant_id,
filter_name == filter_value,
map_model.group_id == group_fk
)
)
)
else:
name_filter = or_(
map_model.name == name,
date_filter
)
q = q.filter(name_filter)
mapping_db = q.with_for_update().all()
if mapping_db:
if field_id:
puuid = field_id
ptype = 'field'
else:
puuid = service_id
ptype = 'service'
raise api.MappingAlreadyExists(
value,
puuid,
ptype,
tenant_id=tenant_id)
field_map = map_model(
mapping_id=uuidutils.generate_uuid(), mapping_id=uuidutils.generate_uuid(),
value=value, value=value,
cost=cost, cost=cost,
field_id=field_fk, field_id=field_fk,
service_id=service_fk, service_id=service_fk,
map_type=map_type, map_type=map_type,
tenant_id=tenant_id) tenant_id=tenant_id,
created_at=created_at,
start=start,
end=end,
name=name,
description=description,
deleted=None,
created_by=created_by,
updated_by=None,
deleted_by=None
)
if group_fk: if group_fk:
field_map.group_id = group_fk field_map.group_id = group_fk
session.add(field_map) session.add(field_map)
@@ -396,7 +501,8 @@ class HashMap(api.HashMap):
with db.session_for_write() as session: with db.session_for_write() as session:
q = session.query(models.HashMapMapping) q = session.query(models.HashMapMapping)
q = q.filter( q = q.filter(
models.HashMapMapping.mapping_id == uuid) models.HashMapMapping.mapping_id == uuid,
models.HashMapMapping.deleted == sqlalchemy.null())
mapping_db = q.with_for_update().one() mapping_db = q.with_for_update().one()
if kwargs: if kwargs:
# NOTE(sheeprine): We want to check that value is not set # NOTE(sheeprine): We want to check that value is not set
@@ -520,15 +626,18 @@ class HashMap(api.HashMap):
session.delete(threshold) session.delete(threshold)
q.delete() q.delete()
def delete_mapping(self, uuid): def delete_mapping(self, uuid, deleted_by=None):
with db.session_for_write() as session: try:
q = utils.model_query( with db.session_for_write() as session:
models.HashMapMapping, q = session.query(models.HashMapMapping)
session) q = q.filter(
q = q.filter(models.HashMapMapping.mapping_id == uuid) models.HashMapMapping.mapping_id == uuid,
r = q.delete() models.HashMapMapping.deleted == sqlalchemy.null())
if not r: mapping_db = q.with_for_update().one()
raise api.NoSuchMapping(uuid) mapping_db.deleted_by = deleted_by
mapping_db.deleted = datetime.datetime.now()
except sqlalchemy.orm.exc.NoResultFound:
raise api.NoSuchMapping(uuid)
def delete_threshold(self, uuid): def delete_threshold(self, uuid):
with db.session_for_write() as session: with db.session_for_write() as session:

View File

@@ -20,6 +20,7 @@ from sqlalchemy import orm
from sqlalchemy import schema from sqlalchemy import schema
from cloudkitty.common.db import models as ck_models from cloudkitty.common.db import models as ck_models
from cloudkitty.rating.common.db.models import VolatileAuditableModel
Base = ck_models.get_base() Base = ck_models.get_base()
@@ -190,7 +191,7 @@ class HashMapGroup(Base, HashMapBase):
name=self.name) name=self.name)
class HashMapMapping(Base, HashMapBase): class HashMapMapping(Base, HashMapBase, VolatileAuditableModel):
"""A mapping between a field or service, a value and a type. """A mapping between a field or service, a value and a type.
Used to model final equation. Used to model final equation.

View File

@@ -33,7 +33,7 @@ class Noop(rating.RatingProcessorBase):
def priority(self): def priority(self):
return 1 return 1
def reload_config(self): def reload_config(self, start=None):
pass pass
def process(self, data): def process(self, data):

View File

@@ -44,9 +44,9 @@ class PyScripts(rating.RatingProcessorBase):
self.load_scripts_in_memory() self.load_scripts_in_memory()
super(PyScripts, self).__init__(tenant_id) super(PyScripts, self).__init__(tenant_id)
def load_scripts_in_memory(self): def load_scripts_in_memory(self, start=None):
db = pyscripts_db_api.get_instance() db = pyscripts_db_api.get_instance()
scripts_uuid_list = db.list_scripts() scripts_uuid_list = db.list_scripts(is_active=start or True)
self.purge_removed_scripts(scripts_uuid_list) self.purge_removed_scripts(scripts_uuid_list)
# Load or update script # Load or update script
@@ -87,12 +87,12 @@ class PyScripts(rating.RatingProcessorBase):
del self._scripts[script_uuid] del self._scripts[script_uuid]
def reload_config(self): def reload_config(self, start=None):
"""Reload the module's configuration. """Reload the module's configuration.
""" """
LOG.debug("Executing the reload of configurations.") LOG.debug("Executing the reload of configurations.")
self.load_scripts_in_memory() self.load_scripts_in_memory(start)
LOG.debug("Configurations reloaded.") LOG.debug("Configurations reloaded.")
def start_script(self, code, data): def start_script(self, code, data):

View File

@@ -13,12 +13,15 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
# #
import datetime
import pecan import pecan
from wsme import types as wtypes from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan import wsmeext.pecan as wsme_pecan
from cloudkitty.api.v1 import types as ck_types from cloudkitty.api.v1 import types as ck_types
from cloudkitty.common.custom_session import get_request_user
from cloudkitty import rating from cloudkitty import rating
from cloudkitty.rating.common.validations import fields as field_validations
from cloudkitty.rating.pyscripts.datamodels import script as script_models from cloudkitty.rating.pyscripts.datamodels import script as script_models
from cloudkitty.rating.pyscripts.db import api as db_api from cloudkitty.rating.pyscripts.db import api as db_api
@@ -39,18 +42,52 @@ class PyScriptsScriptsController(rating.RatingRestControllerBase):
data = data.encode('utf-8') data = data.encode('utf-8')
return data return data
@wsme_pecan.wsexpose(script_models.ScriptCollection, bool) @wsme_pecan.wsexpose(script_models.ScriptCollection,
def get_all(self, no_data=False): bool,
bool,
datetime.datetime,
datetime.datetime,
str,
str,
str,
str,
bool)
def get_all(self, no_data=False,
deleted=False,
start=None,
end=None,
updated_by=None,
created_by=None,
deleted_by=None,
description=None,
is_active=None):
"""Get the script list """Get the script list
:param no_data: Set to True to remove script data from output. :param no_data: Set to True to remove script data from output.
:param deleted: Show deleted mappings.
:param start: Mappings with start after date.
:param end: Mappings with end before date.
:param updated_by: user uuid to filter on.
:param created_by: user uuid to filter on.
:param deleted_by: user uuid to filter on.
:param description: mapping that contains the text in description.
:param is_active: only active mappings.
:return: List of every scripts. :return: List of every scripts.
""" """
pyscripts = db_api.get_instance() pyscripts = db_api.get_instance()
script_list = [] script_list = []
script_uuid_list = pyscripts.list_scripts() script_uuid_list = pyscripts.list_scripts(
deleted=deleted,
start=start,
end=end,
updated_by=updated_by,
created_by=created_by,
deleted_by=deleted_by,
description=description,
is_active=is_active)
for script_uuid in script_uuid_list: for script_uuid in script_uuid_list:
script_db = pyscripts.get_script(uuid=script_uuid) script_db = pyscripts.get_script(uuid=script_uuid,
deleted=deleted)
script = script_db.export_model() script = script_db.export_model()
if no_data: if no_data:
del script['data'] del script['data']
@@ -73,17 +110,27 @@ class PyScriptsScriptsController(rating.RatingRestControllerBase):
pecan.abort(404, e.args[0]) pecan.abort(404, e.args[0])
@wsme_pecan.wsexpose(script_models.Script, @wsme_pecan.wsexpose(script_models.Script,
bool,
body=script_models.Script, body=script_models.Script,
status_code=201) status_code=201)
def post(self, script_data): def post(self, force=False, script_data=None):
"""Create pyscripts script. """Create pyscripts script.
:param force: Allows start and end in the past.
:param script_data: Information about the script to create. :param script_data: Information about the script to create.
""" """
pyscripts = db_api.get_instance() pyscripts = db_api.get_instance()
field_validations.validate_resource(
script_data, force=force)
try: try:
created_by = get_request_user()
data = self.normalize_data(script_data.data) data = self.normalize_data(script_data.data)
script_db = pyscripts.create_script(script_data.name, data) script_db = pyscripts.create_script(
script_data.name, data,
created_by=created_by,
start=script_data.start,
end=script_data.end,
description=script_data.description)
pecan.response.location = pecan.request.path_url pecan.response.location = pecan.request.path_url
if pecan.response.location[-1] != '/': if pecan.response.location[-1] != '/':
pecan.response.location += '/' pecan.response.location += '/'
@@ -105,10 +152,21 @@ class PyScriptsScriptsController(rating.RatingRestControllerBase):
""" """
pyscripts = db_api.get_instance() pyscripts = db_api.get_instance()
try: try:
updated_by = get_request_user()
current_script = pyscripts.get_script(uuid=script_id)
data = self.normalize_data(script_data.data) data = self.normalize_data(script_data.data)
script_db = pyscripts.update_script(script_id, if field_validations.validate_update_allowing_only_end_date(
name=script_data.name, current_script,
data=data) script_data):
script_db = pyscripts.update_script(
script_id, end=script_data.end,
updated_by=updated_by)
else:
script_db = pyscripts.update_script(
script_id, data=data, updated_by=updated_by,
end=script_data.end,
description=script_data.description,
start=script_data.start)
pecan.response.location = pecan.request.path_url pecan.response.location = pecan.request.path_url
if pecan.response.location[-1] != '/': if pecan.response.location[-1] != '/':
pecan.response.location += '/' pecan.response.location += '/'
@@ -125,7 +183,8 @@ class PyScriptsScriptsController(rating.RatingRestControllerBase):
:param script_id: UUID of the script to delete. :param script_id: UUID of the script to delete.
""" """
pyscripts = db_api.get_instance() pyscripts = db_api.get_instance()
deleted_by = get_request_user()
try: try:
pyscripts.delete_script(uuid=script_id) pyscripts.delete_script(uuid=script_id, deleted_by=deleted_by)
except db_api.NoSuchScript as e: except db_api.NoSuchScript as e:
pecan.abort(404, e.args[0]) pecan.abort(404, e.args[0])

View File

@@ -16,9 +16,10 @@
from wsme import types as wtypes from wsme import types as wtypes
from cloudkitty.api.v1 import types as ck_types from cloudkitty.api.v1 import types as ck_types
from cloudkitty.rating.common.datamodels.models import VolatileAuditableModel
class Script(wtypes.Base): class Script(VolatileAuditableModel):
"""Type describing a script. """Type describing a script.
""" """
@@ -37,12 +38,14 @@ class Script(wtypes.Base):
@classmethod @classmethod
def sample(cls): def sample(cls):
sample = super().sample()
sample = cls(script_id='bc05108d-f515-4984-8077-de319cbf35aa', sample = cls(script_id='bc05108d-f515-4984-8077-de319cbf35aa',
name='policy1', name='policy1',
data='return 0', data='return 0',
checksum='cf83e1357eefb8bdf1542850d66d8007d620e4050b5715d' checksum='cf83e1357eefb8bdf1542850d66d8007d620e4050b5715d'
'c83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec' 'c83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec'
'2f63b931bd47417a81a538327af927da3e') '2f63b931bd47417a81a538327af927da3e',
**sample.__dict__)
return sample return sample

View File

@@ -64,25 +64,34 @@ class PyScripts(object, metaclass=abc.ABCMeta):
""" """
@abc.abstractmethod @abc.abstractmethod
def get_script(self, name=None, uuid=None): def get_script(self, name=None, uuid=None, deleted=None):
"""Return a script object. """Return a script object.
:param name: Filter on a script name. :param name: Filter on a script name.
:param uuid: The uuid of the script to get. :param uuid: The uuid of the script to get.
:param deleted: Show deleted script.
""" """
@abc.abstractmethod @abc.abstractmethod
def list_scripts(self): def list_scripts(self, **kwargs):
"""Return a UUID list of every scripts available. """Return a UUID list of every scripts available.
""" """
@abc.abstractmethod @abc.abstractmethod
def create_script(self, name, data): def create_script(self, name, data,
start=None,
end=None,
description=None,
created_by=None):
"""Create a new script. """Create a new script.
:param name: Name of the script to create. :param name: Name of the script to create.
:param data: Content of the python script. :param data: Content of the python script.
:param start: The date the script will start to be valid.
:param end: The date the script will stop to be valid.
:param description: The script description
:param created_by: The user who created the script.
""" """
@abc.abstractmethod @abc.abstractmethod

View File

@@ -0,0 +1,58 @@
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Add start end dates and audit in pyscripts
Revision ID: c6e4cda29654
Revises: 75c205f6f1a2
Create Date: 2023-03-06 14:22:00.000000
"""
from alembic import op
from cloudkitty import db
from cloudkitty.rating.common.db.migrations import create_common_tables
from cloudkitty.rating.pyscripts.db.sqlalchemy import models
import datetime
# revision identifiers, used by Alembic.
revision = 'c6e4cda29654'
down_revision = '75c205f6f1a2'
def _update_start_date():
    """Backfill start/created_by for scripts predating this migration."""
    # Year of the start of the project (not the first version).
    # NOTE(review): this datetime is naive, while the equivalent hashmap
    # migration uses a timezone-aware epoch — confirm the column accepts
    # naive values consistently across backends.
    initial_start_date = datetime.datetime(year=2014, month=1, day=1)
    with db.session_for_write() as session:
        q = session.query(models.PyScriptsScript)
        mapping_db = q.with_for_update().all()
        for entry in mapping_db:
            entry.start = initial_start_date
            entry.created_by = 'migration'
def upgrade():
    """Alembic upgrade: add rule-lifetime and audit columns to scripts."""
    table_name = 'pyscripts_scripts'
    # NOTE(review): the named unique constraint drop is skipped on SQLite —
    # presumably the constraint is unnamed/absent under that dialect;
    # confirm against the original schema definition.
    is_sqlite = op.get_context().dialect.name == 'sqlite'
    with op.batch_alter_table(
            table_name) as batch_op:
        if not is_sqlite:
            batch_op.drop_constraint(
                'name',
                type_='unique')
        # Shared start/end/description/deleted/audit columns common to
        # all rating rules.
        create_common_tables(batch_op)
    # Backfill start/created_by for rows that existed before this migration.
    _update_start_date()

View File

@@ -13,12 +13,14 @@
# License for the specific language governing permissions and limitations # License for the specific language governing permissions and limitations
# under the License. # under the License.
# #
import datetime
from oslo_db import exception from oslo_db import exception
from oslo_db.sqlalchemy import utils
from oslo_utils import uuidutils from oslo_utils import uuidutils
import sqlalchemy import sqlalchemy
from cloudkitty import db from cloudkitty import db
from cloudkitty.rating.common.db.filters import get_filters
from cloudkitty.rating.pyscripts.db import api from cloudkitty.rating.pyscripts.db import api
from cloudkitty.rating.pyscripts.db.sqlalchemy import migration from cloudkitty.rating.pyscripts.db.sqlalchemy import migration
from cloudkitty.rating.pyscripts.db.sqlalchemy import models from cloudkitty.rating.pyscripts.db.sqlalchemy import models
@@ -33,7 +35,7 @@ class PyScripts(api.PyScripts):
def get_migration(self): def get_migration(self):
return migration return migration
def get_script(self, name=None, uuid=None): def get_script(self, name=None, uuid=None, deleted=False):
with db.session_for_read() as session: with db.session_for_read() as session:
try: try:
q = session.query(models.PyScriptsScript) q = session.query(models.PyScriptsScript)
@@ -45,22 +47,51 @@ class PyScripts(api.PyScripts):
models.PyScriptsScript.script_id == uuid) models.PyScriptsScript.script_id == uuid)
else: else:
raise ValueError('You must specify either name or uuid.') raise ValueError('You must specify either name or uuid.')
if not deleted:
q = q.filter(
models.PyScriptsScript.deleted == sqlalchemy.null())
res = q.one() res = q.one()
return res return res
except sqlalchemy.orm.exc.NoResultFound: except sqlalchemy.orm.exc.NoResultFound:
raise api.NoSuchScript(name=name, uuid=uuid) raise api.NoSuchScript(name=name, uuid=uuid)
def list_scripts(self): def list_scripts(self, **kwargs):
with db.session_for_read() as session: with db.session_for_read() as session:
q = session.query(models.PyScriptsScript) q = session.query(models.PyScriptsScript)
q = get_filters(q, models.PyScriptsScript, **kwargs)
res = q.values( res = q.values(
models.PyScriptsScript.script_id) models.PyScriptsScript.script_id)
return [uuid[0] for uuid in res] return [uuid[0] for uuid in res]
def create_script(self, name, data): def create_script(self, name, data,
start=None,
end=None,
description=None,
created_by=None):
created_at = datetime.datetime.now()
try: try:
with db.session_for_write() as session: with db.session_for_write() as session:
script_db = models.PyScriptsScript(name=name) q = session.query(models.PyScriptsScript)
q = q.filter(
models.PyScriptsScript.name == name,
models.PyScriptsScript.deleted == sqlalchemy.null()
)
script_db = q.with_for_update().all()
if script_db:
script_db = self.get_script(name=name)
raise api.ScriptAlreadyExists(
script_db.name,
script_db.script_id)
script_db = models.PyScriptsScript(
name=name,
start=start,
created_at=created_at,
end=end,
description=description,
deleted=None,
created_by=created_by,
updated_by=None,
deleted_by=None)
script_db.data = data script_db.data = data
script_db.script_id = uuidutils.generate_uuid() script_db.script_id = uuidutils.generate_uuid()
session.add(script_db) session.add(script_db)
@@ -76,7 +107,8 @@ class PyScripts(api.PyScripts):
with db.session_for_write() as session: with db.session_for_write() as session:
q = session.query(models.PyScriptsScript) q = session.query(models.PyScriptsScript)
q = q.filter( q = q.filter(
models.PyScriptsScript.script_id == uuid models.PyScriptsScript.script_id == uuid,
models.PyScriptsScript.deleted == sqlalchemy.null()
) )
script_db = q.with_for_update().one() script_db = q.with_for_update().one()
if kwargs: if kwargs:
@@ -96,17 +128,22 @@ class PyScripts(api.PyScripts):
except sqlalchemy.orm.exc.NoResultFound: except sqlalchemy.orm.exc.NoResultFound:
raise api.NoSuchScript(uuid=uuid) raise api.NoSuchScript(uuid=uuid)
def delete_script(self, name=None, uuid=None): def delete_script(self, name=None, uuid=None, deleted_by=None):
with db.session_for_write() as session: with db.session_for_write() as session:
q = utils.model_query( try:
models.PyScriptsScript, q = session.query(models.PyScriptsScript)
session) if name:
if name: q = q.filter(models.PyScriptsScript.name == name)
q = q.filter(models.PyScriptsScript.name == name) elif uuid:
elif uuid: q = q.filter(models.PyScriptsScript.script_id == uuid)
q = q.filter(models.PyScriptsScript.script_id == uuid) else:
else: raise ValueError(
raise ValueError('You must specify either name or uuid.') 'You must specify either name or uuid.')
r = q.delete() q = q.filter(
if not r: models.PyScriptsScript.deleted == sqlalchemy.null())
script_db = q.with_for_update().one()
script_db.deleted_by = deleted_by
script_db.deleted = datetime.datetime.now()
except sqlalchemy.orm.exc.NoResultFound:
raise api.NoSuchScript(uuid=uuid) raise api.NoSuchScript(uuid=uuid)

View File

@@ -21,6 +21,8 @@ import sqlalchemy
from sqlalchemy.ext import declarative from sqlalchemy.ext import declarative
from sqlalchemy.ext import hybrid from sqlalchemy.ext import hybrid
from cloudkitty.rating.common.db.models import VolatileAuditableModel
Base = declarative.declarative_base() Base = declarative.declarative_base()
@@ -59,7 +61,7 @@ class PyScriptsBase(models.ModelBase):
return res return res
class PyScriptsScript(Base, PyScriptsBase): class PyScriptsScript(Base, PyScriptsBase, VolatileAuditableModel):
"""A PyScripts entry. """A PyScripts entry.
""" """

View File

@@ -55,7 +55,7 @@ class FakeRatingModule(rating.RatingProcessorBase):
entry['rating'] = {'price': decimal.Decimal(0)} entry['rating'] = {'price': decimal.Decimal(0)}
return data return data
def reload_config(self): def reload_config(self, start=None):
pass pass
def notify_reload(self): def notify_reload(self):

View File

@@ -54,18 +54,26 @@ from cloudkitty.utils import tz as tzutils
INITIAL_DT = datetime.datetime(2015, 1, 1, tzinfo=tz.tzutc()) INITIAL_DT = datetime.datetime(2015, 1, 1, tzinfo=tz.tzutc())
CURRENT = 0
class UUIDFixture(fixture.GabbiFixture): class UUIDFixture(fixture.GabbiFixture):
def incremental_uuid(self):
global CURRENT
CURRENT = CURRENT + 1
return f'6c1b8a30-797f-4b7e-ad66-{str(CURRENT).zfill(12)}'
def start_fixture(self): def start_fixture(self):
FAKE_UUID = '6c1b8a30-797f-4b7e-ad66-9879b79059fb'
patcher = mock.patch( patcher = mock.patch(
'oslo_utils.uuidutils.generate_uuid', 'oslo_utils.uuidutils.generate_uuid',
return_value=FAKE_UUID) side_effect=self.incremental_uuid)
patcher.start() patcher.start()
self.patcher = patcher self.patcher = patcher
def stop_fixture(self): def stop_fixture(self):
global CURRENT
CURRENT = 0
self.patcher.stop() self.patcher.stop()

View File

@@ -68,6 +68,8 @@ tests:
data: data:
service_id: "371bcd08-009f-11e6-91de-8745729038b2" service_id: "371bcd08-009f-11e6-91de-8745729038b2"
type: "fail" type: "fail"
start: "3000-01-01"
name: 'teste3'
cost: "0.2" cost: "0.2"
status: 400 status: 400
response_strings: response_strings:
@@ -117,7 +119,7 @@ tests:
- "No such mapping: 42" - "No such mapping: 42"
- name: create a field mapping to check updates - name: create a field mapping to check updates
url: /v1/rating/module_config/hashmap/mappings url: /v1/rating/module_config/hashmap/mappings?force=true
method: POST method: POST
request_headers: request_headers:
content-type: application/json content-type: application/json
@@ -125,6 +127,9 @@ tests:
data: data:
field_id: $ENVIRON['hash_error_field_id'] field_id: $ENVIRON['hash_error_field_id']
type: "flat" type: "flat"
start: "2014-01-01"
end: "2014-01-02"
name: 'teste4'
cost: "0.2" cost: "0.2"
value: "fail" value: "fail"
status: 201 status: 201
@@ -141,7 +146,7 @@ tests:
value: '' value: ''
status: 400 status: 400
response_strings: response_strings:
- "You must specify a value for a field mapping." - "Cannot update a rule that was already processed and has a defined end date."
- name: create a service mapping with an invalid service_id - name: create a service mapping with an invalid service_id
url: /v1/rating/module_config/hashmap/mappings url: /v1/rating/module_config/hashmap/mappings
@@ -152,6 +157,8 @@ tests:
data: data:
service_id: "de23e3fe-0097-11e6-a44d-2b09512e61d9" service_id: "de23e3fe-0097-11e6-a44d-2b09512e61d9"
type: "flat" type: "flat"
start: "3000-01-01"
name: 'teste5'
cost: "0.2" cost: "0.2"
status: 400 status: 400
response_strings: response_strings:
@@ -167,6 +174,8 @@ tests:
field_id: "de23e3fe-0097-11e6-a44d-2b09512e61d9" field_id: "de23e3fe-0097-11e6-a44d-2b09512e61d9"
type: "flat" type: "flat"
cost: "0.2" cost: "0.2"
start: "3000-01-01"
name: 'teste6'
value: "fail" value: "fail"
status: 400 status: 400
response_strings: response_strings:
@@ -212,6 +221,8 @@ tests:
service_id: "de23e3fe-0097-11e6-a44d-2b09512e61d9" service_id: "de23e3fe-0097-11e6-a44d-2b09512e61d9"
field_id: "de23e3fe-0097-11e6-a44d-2b09512e61d9" field_id: "de23e3fe-0097-11e6-a44d-2b09512e61d9"
type: "flat" type: "flat"
start: "3000-01-01"
name: 'teste7'
cost: "0.2" cost: "0.2"
status: 400 status: 400
response_strings: response_strings:
@@ -225,6 +236,8 @@ tests:
x-roles: admin x-roles: admin
data: data:
type: "flat" type: "flat"
start: "3000-01-01"
name: 'teste8'
cost: "0.2" cost: "0.2"
value: "fail" value: "fail"
status: 400 status: 400
@@ -240,6 +253,8 @@ tests:
data: data:
field_id: $ENVIRON['hash_error_field_id'] field_id: $ENVIRON['hash_error_field_id']
type: "flat" type: "flat"
start: "3000-01-01"
name: 'teste9'
cost: "0.2" cost: "0.2"
status: 400 status: 400
response_strings: response_strings:

View File

@@ -14,10 +14,10 @@ tests:
name: "cpu" name: "cpu"
status: 201 status: 201
response_json_paths: response_json_paths:
$.service_id: "6c1b8a30-797f-4b7e-ad66-9879b79059fb" $.service_id: "6c1b8a30-797f-4b7e-ad66-000000000001"
$.name: "cpu" $.name: "cpu"
response_headers: response_headers:
location: $SCHEME://$NETLOC/v1/rating/module_config/hashmap/services/6c1b8a30-797f-4b7e-ad66-9879b79059fb location: $SCHEME://$NETLOC/v1/rating/module_config/hashmap/services/6c1b8a30-797f-4b7e-ad66-000000000001
- name: check redirect on service mapping creation - name: check redirect on service mapping creation
url: /v1/rating/module_config/hashmap/mappings url: /v1/rating/module_config/hashmap/mappings
@@ -26,20 +26,22 @@ tests:
content-type: application/json content-type: application/json
x-roles: admin x-roles: admin
data: data:
service_id: "6c1b8a30-797f-4b7e-ad66-9879b79059fb" service_id: "6c1b8a30-797f-4b7e-ad66-000000000001"
type: "flat" type: "flat"
start: "3000-01-01"
name: 'teste'
cost: "0.1000000000000000055511151231" cost: "0.1000000000000000055511151231"
status: 201 status: 201
response_json_paths: response_json_paths:
$.mapping_id: "6c1b8a30-797f-4b7e-ad66-9879b79059fb" $.mapping_id: "6c1b8a30-797f-4b7e-ad66-000000000002"
$.service_id: "6c1b8a30-797f-4b7e-ad66-9879b79059fb" $.service_id: "6c1b8a30-797f-4b7e-ad66-000000000001"
$.type: "flat" $.type: "flat"
$.cost: "0.1000000000000000055511151231" $.cost: "0.1000000000000000055511151231"
response_headers: response_headers:
location: '$SCHEME://$NETLOC/v1/rating/module_config/hashmap/mappings/6c1b8a30-797f-4b7e-ad66-9879b79059fb' location: '$SCHEME://$NETLOC/v1/rating/module_config/hashmap/mappings/6c1b8a30-797f-4b7e-ad66-000000000002'
- name: delete test mapping - name: delete test mapping
url: /v1/rating/module_config/hashmap/mappings/6c1b8a30-797f-4b7e-ad66-9879b79059fb url: /v1/rating/module_config/hashmap/mappings/6c1b8a30-797f-4b7e-ad66-000000000002
method: DELETE method: DELETE
status: 204 status: 204
@@ -50,22 +52,22 @@ tests:
content-type: application/json content-type: application/json
x-roles: admin x-roles: admin
data: data:
service_id: "6c1b8a30-797f-4b7e-ad66-9879b79059fb" service_id: "6c1b8a30-797f-4b7e-ad66-000000000001"
level: "2" level: "2"
type: "flat" type: "flat"
cost: "0.10000000" cost: "0.10000000"
status: 201 status: 201
response_json_paths: response_json_paths:
$.threshold_id: "6c1b8a30-797f-4b7e-ad66-9879b79059fb" $.threshold_id: "6c1b8a30-797f-4b7e-ad66-000000000003"
$.service_id: "6c1b8a30-797f-4b7e-ad66-9879b79059fb" $.service_id: "6c1b8a30-797f-4b7e-ad66-000000000001"
$.level: "2.00000000" $.level: "2.00000000"
$.type: "flat" $.type: "flat"
$.cost: "0.1000000000000000055511151231" $.cost: "0.1000000000000000055511151231"
response_headers: response_headers:
location: '$SCHEME://$NETLOC/v1/rating/module_config/hashmap/thresholds/6c1b8a30-797f-4b7e-ad66-9879b79059fb' location: '$SCHEME://$NETLOC/v1/rating/module_config/hashmap/thresholds/6c1b8a30-797f-4b7e-ad66-000000000003'
- name: delete test threshold - name: delete test threshold
url: /v1/rating/module_config/hashmap/thresholds/6c1b8a30-797f-4b7e-ad66-9879b79059fb url: /v1/rating/module_config/hashmap/thresholds/6c1b8a30-797f-4b7e-ad66-000000000003
method: DELETE method: DELETE
status: 204 status: 204
@@ -76,15 +78,15 @@ tests:
content-type: application/json content-type: application/json
x-roles: admin x-roles: admin
data: data:
service_id: "6c1b8a30-797f-4b7e-ad66-9879b79059fb" service_id: "6c1b8a30-797f-4b7e-ad66-000000000001"
name: "flavor_id" name: "flavor_id"
status: 201 status: 201
response_json_paths: response_json_paths:
$.service_id: "6c1b8a30-797f-4b7e-ad66-9879b79059fb" $.service_id: "6c1b8a30-797f-4b7e-ad66-000000000001"
$.name: "flavor_id" $.name: "flavor_id"
$.field_id: "6c1b8a30-797f-4b7e-ad66-9879b79059fb" $.field_id: "6c1b8a30-797f-4b7e-ad66-000000000004"
response_headers: response_headers:
location: '$SCHEME://$NETLOC/v1/rating/module_config/hashmap/fields/6c1b8a30-797f-4b7e-ad66-9879b79059fb' location: '$SCHEME://$NETLOC/v1/rating/module_config/hashmap/fields/6c1b8a30-797f-4b7e-ad66-000000000004'
- name: check redirect on field mapping creation - name: check redirect on field mapping creation
url: /v1/rating/module_config/hashmap/mappings url: /v1/rating/module_config/hashmap/mappings
@@ -93,19 +95,21 @@ tests:
content-type: application/json content-type: application/json
x-roles: admin x-roles: admin
data: data:
field_id: "6c1b8a30-797f-4b7e-ad66-9879b79059fb" field_id: "6c1b8a30-797f-4b7e-ad66-000000000004"
value: "04774238-fcad-11e5-a90e-6391fd56aab2" value: "04774238-fcad-11e5-a90e-6391fd56aab2"
type: "flat" type: "flat"
start: "3000-01-01"
name: 'teste2'
cost: "0.10000000" cost: "0.10000000"
status: 201 status: 201
response_json_paths: response_json_paths:
$.mapping_id: "6c1b8a30-797f-4b7e-ad66-9879b79059fb" $.mapping_id: "6c1b8a30-797f-4b7e-ad66-000000000005"
$.field_id: "6c1b8a30-797f-4b7e-ad66-9879b79059fb" $.field_id: "6c1b8a30-797f-4b7e-ad66-000000000004"
$.value: "04774238-fcad-11e5-a90e-6391fd56aab2" $.value: "04774238-fcad-11e5-a90e-6391fd56aab2"
$.type: "flat" $.type: "flat"
$.cost: "0.1000000000000000055511151231" $.cost: "0.1000000000000000055511151231"
response_headers: response_headers:
location: '$SCHEME://$NETLOC/v1/rating/module_config/hashmap/mappings/6c1b8a30-797f-4b7e-ad66-9879b79059fb' location: '$SCHEME://$NETLOC/v1/rating/module_config/hashmap/mappings/6c1b8a30-797f-4b7e-ad66-000000000005'
- name: check redirect on field threshold creation - name: check redirect on field threshold creation
url: /v1/rating/module_config/hashmap/thresholds url: /v1/rating/module_config/hashmap/thresholds
@@ -114,19 +118,19 @@ tests:
content-type: application/json content-type: application/json
x-roles: admin x-roles: admin
data: data:
field_id: "6c1b8a30-797f-4b7e-ad66-9879b79059fb" field_id: "6c1b8a30-797f-4b7e-ad66-000000000004"
level: "2" level: "2"
type: "flat" type: "flat"
cost: "0.10000000" cost: "0.10000000"
status: 201 status: 201
response_json_paths: response_json_paths:
$.threshold_id: "6c1b8a30-797f-4b7e-ad66-9879b79059fb" $.threshold_id: "6c1b8a30-797f-4b7e-ad66-000000000006"
$.field_id: "6c1b8a30-797f-4b7e-ad66-9879b79059fb" $.field_id: "6c1b8a30-797f-4b7e-ad66-000000000004"
$.level: "2.00000000" $.level: "2.00000000"
$.type: "flat" $.type: "flat"
$.cost: "0.1000000000000000055511151231" $.cost: "0.1000000000000000055511151231"
response_headers: response_headers:
location: '$SCHEME://$NETLOC/v1/rating/module_config/hashmap/thresholds/6c1b8a30-797f-4b7e-ad66-9879b79059fb' location: '$SCHEME://$NETLOC/v1/rating/module_config/hashmap/thresholds/6c1b8a30-797f-4b7e-ad66-000000000006'
- name: check redirect on group creation - name: check redirect on group creation
url: /v1/rating/module_config/hashmap/groups url: /v1/rating/module_config/hashmap/groups
@@ -138,7 +142,7 @@ tests:
name: "compute_uptime" name: "compute_uptime"
status: 201 status: 201
response_json_paths: response_json_paths:
$.group_id: "6c1b8a30-797f-4b7e-ad66-9879b79059fb" $.group_id: "6c1b8a30-797f-4b7e-ad66-000000000007"
$.name: "compute_uptime" $.name: "compute_uptime"
response_headers: response_headers:
location: $SCHEME://$NETLOC/v1/rating/module_config/hashmap/groups/6c1b8a30-797f-4b7e-ad66-9879b79059fb location: $SCHEME://$NETLOC/v1/rating/module_config/hashmap/groups/6c1b8a30-797f-4b7e-ad66-000000000007

View File

@@ -35,7 +35,7 @@ tests:
$.name: "cpu" $.name: "cpu"
- name: create a flat service mapping - name: create a flat service mapping
url: /v1/rating/module_config/hashmap/mappings url: /v1/rating/module_config/hashmap/mappings?force=true
method: POST method: POST
request_headers: request_headers:
content-type: application/json content-type: application/json
@@ -43,6 +43,8 @@ tests:
data: data:
service_id: $RESPONSE['$.service_id'] service_id: $RESPONSE['$.service_id']
type: "flat" type: "flat"
start: "2014-01-01"
name: 'teste10'
cost: "0.10000000" cost: "0.10000000"
status: 201 status: 201
response_json_paths: response_json_paths:
@@ -63,7 +65,7 @@ tests:
$.services[0].name: "cpu" $.services[0].name: "cpu"
- name: create a rate service mapping - name: create a rate service mapping
url: /v1/rating/module_config/hashmap/mappings url: /v1/rating/module_config/hashmap/mappings?force=true
method: POST method: POST
request_headers: request_headers:
content-type: application/json content-type: application/json
@@ -71,6 +73,8 @@ tests:
data: data:
service_id: $RESPONSE['$.services[0].service_id'] service_id: $RESPONSE['$.services[0].service_id']
type: "rate" type: "rate"
start: "2014-01-01"
name: 'teste11'
cost: "0.2" cost: "0.2"
status: 201 status: 201
response_json_paths: response_json_paths:
@@ -79,7 +83,7 @@ tests:
$.cost: "0.2000000000000000111022302463" $.cost: "0.2000000000000000111022302463"
- name: create a flat service mapping for a tenant - name: create a flat service mapping for a tenant
url: /v1/rating/module_config/hashmap/mappings url: /v1/rating/module_config/hashmap/mappings?force=true
method: POST method: POST
request_headers: request_headers:
content-type: application/json content-type: application/json
@@ -87,6 +91,8 @@ tests:
data: data:
service_id: $ENVIRON['hash_service_id'] service_id: $ENVIRON['hash_service_id']
type: "flat" type: "flat"
start: "2014-01-01"
name: 'teste12'
cost: "0.2" cost: "0.2"
tenant_id: "24a7fdae-27ff-11e6-8c4f-6b725a05bf50" tenant_id: "24a7fdae-27ff-11e6-8c4f-6b725a05bf50"
status: 201 status: 201
@@ -177,7 +183,7 @@ tests:
$.field_id: $RESPONSE['$.field_id'] $.field_id: $RESPONSE['$.field_id']
- name: create a flat field mapping - name: create a flat field mapping
url: /v1/rating/module_config/hashmap/mappings url: /v1/rating/module_config/hashmap/mappings?force=true
method: POST method: POST
request_headers: request_headers:
content-type: application/json content-type: application/json
@@ -185,6 +191,8 @@ tests:
data: data:
field_id: $RESPONSE['$.field_id'] field_id: $RESPONSE['$.field_id']
type: "rate" type: "rate"
start: "2014-01-01"
name: 'teste13'
cost: "0.2" cost: "0.2"
value: "e2083e22-0004-11e6-82bd-2f02489b068b" value: "e2083e22-0004-11e6-82bd-2f02489b068b"
status: 201 status: 201
@@ -208,7 +216,7 @@ tests:
$.fields[0].name: "flavor_id" $.fields[0].name: "flavor_id"
- name: create a rate field mapping - name: create a rate field mapping
url: /v1/rating/module_config/hashmap/mappings url: /v1/rating/module_config/hashmap/mappings?force=true
method: POST method: POST
request_headers: request_headers:
content-type: application/json content-type: application/json
@@ -216,6 +224,8 @@ tests:
data: data:
field_id: $RESPONSE['$.fields[0].field_id'] field_id: $RESPONSE['$.fields[0].field_id']
type: "rate" type: "rate"
start: "2014-01-01"
name: 'teste14'
cost: "0.2" cost: "0.2"
value: "f17a0674-0004-11e6-a16b-cf941f4668c4" value: "f17a0674-0004-11e6-a16b-cf941f4668c4"
status: 201 status: 201
@@ -236,7 +246,9 @@ tests:
type: "rate" type: "rate"
cost: "0.3" cost: "0.3"
value: "f17a0674-0004-11e6-a16b-cf941f4668c4" value: "f17a0674-0004-11e6-a16b-cf941f4668c4"
status: 302 status: 400
response_json_paths:
$.faultstring: "You are allowed to update only the attribute [end] as this rule is already running as it started on [2014-01-01 00:00:00]"
- name: check updated mapping - name: check updated mapping
url: /v1/rating/module_config/hashmap/mappings/$ENVIRON['hash_rate_mapping_id'] url: /v1/rating/module_config/hashmap/mappings/$ENVIRON['hash_rate_mapping_id']
@@ -245,7 +257,7 @@ tests:
$.mapping_id: $ENVIRON['hash_rate_mapping_id'] $.mapping_id: $ENVIRON['hash_rate_mapping_id']
$.field_id: $ENVIRON['hash_field_id'] $.field_id: $ENVIRON['hash_field_id']
$.type: "rate" $.type: "rate"
$.cost: "0.2999999999999999888977697537" $.cost: "0.2000000000000000111022302463"
$.value: "f17a0674-0004-11e6-a16b-cf941f4668c4" $.value: "f17a0674-0004-11e6-a16b-cf941f4668c4"
- name: delete a field - name: delete a field
@@ -304,7 +316,7 @@ tests:
hash_field_id: $.field_id hash_field_id: $.field_id
- name: create a field mapping for recursive delete - name: create a field mapping for recursive delete
url: /v1/rating/module_config/hashmap/mappings url: /v1/rating/module_config/hashmap/mappings?force=true
method: POST method: POST
request_headers: request_headers:
content-type: application/json content-type: application/json
@@ -312,6 +324,8 @@ tests:
data: data:
field_id: $RESPONSE['$.field_id'] field_id: $RESPONSE['$.field_id']
value: "flavor_id" value: "flavor_id"
start: "2014-01-01"
name: 'teste15'
cost: "0.1" cost: "0.1"
status: 201 status: 201
response_store_environ: response_store_environ:

View File

@@ -35,12 +35,12 @@ tests:
data: "a = 0" data: "a = 0"
status: 201 status: 201
response_json_paths: response_json_paths:
$.script_id: "6c1b8a30-797f-4b7e-ad66-9879b79059fb" $.script_id: "6c1b8a30-797f-4b7e-ad66-000000000001"
$.name: "policy1" $.name: "policy1"
$.data: "a = 0" $.data: "a = 0"
$.checksum: "4c612e33c0e40b7bf53cf95fad47dbfbeab9dd62f9bc181a9d1c6f40a087782223c23f793e747b0466b9e6998c6ea54f4edbd20febd13edb13b55074b5ee1a5a" $.checksum: "4c612e33c0e40b7bf53cf95fad47dbfbeab9dd62f9bc181a9d1c6f40a087782223c23f793e747b0466b9e6998c6ea54f4edbd20febd13edb13b55074b5ee1a5a"
response_headers: response_headers:
location: '$SCHEME://$NETLOC/v1/rating/module_config/pyscripts/scripts/6c1b8a30-797f-4b7e-ad66-9879b79059fb' location: '$SCHEME://$NETLOC/v1/rating/module_config/pyscripts/scripts/6c1b8a30-797f-4b7e-ad66-000000000001'
- name: create duplicate policy script - name: create duplicate policy script
url: /v1/rating/module_config/pyscripts/scripts url: /v1/rating/module_config/pyscripts/scripts
@@ -53,13 +53,13 @@ tests:
data: "a = 0" data: "a = 0"
status: 409 status: 409
response_strings: response_strings:
- "Script policy1 already exists (UUID: 6c1b8a30-797f-4b7e-ad66-9879b79059fb)" - "Script policy1 already exists (UUID: 6c1b8a30-797f-4b7e-ad66-000000000001)"
- name: list scripts - name: list scripts
url: /v1/rating/module_config/pyscripts/scripts url: /v1/rating/module_config/pyscripts/scripts
status: 200 status: 200
response_json_paths: response_json_paths:
$.scripts[0].script_id: "6c1b8a30-797f-4b7e-ad66-9879b79059fb" $.scripts[0].script_id: "6c1b8a30-797f-4b7e-ad66-000000000001"
$.scripts[0].name: "policy1" $.scripts[0].name: "policy1"
$.scripts[0].data: "a = 0" $.scripts[0].data: "a = 0"
$.scripts[0].checksum: "4c612e33c0e40b7bf53cf95fad47dbfbeab9dd62f9bc181a9d1c6f40a087782223c23f793e747b0466b9e6998c6ea54f4edbd20febd13edb13b55074b5ee1a5a" $.scripts[0].checksum: "4c612e33c0e40b7bf53cf95fad47dbfbeab9dd62f9bc181a9d1c6f40a087782223c23f793e747b0466b9e6998c6ea54f4edbd20febd13edb13b55074b5ee1a5a"
@@ -68,21 +68,21 @@ tests:
url: /v1/rating/module_config/pyscripts/scripts?no_data=true url: /v1/rating/module_config/pyscripts/scripts?no_data=true
status: 200 status: 200
response_json_paths: response_json_paths:
$.scripts[0].script_id: "6c1b8a30-797f-4b7e-ad66-9879b79059fb" $.scripts[0].script_id: "6c1b8a30-797f-4b7e-ad66-000000000001"
$.scripts[0].name: "policy1" $.scripts[0].name: "policy1"
$.scripts[0].checksum: "4c612e33c0e40b7bf53cf95fad47dbfbeab9dd62f9bc181a9d1c6f40a087782223c23f793e747b0466b9e6998c6ea54f4edbd20febd13edb13b55074b5ee1a5a" $.scripts[0].checksum: "4c612e33c0e40b7bf53cf95fad47dbfbeab9dd62f9bc181a9d1c6f40a087782223c23f793e747b0466b9e6998c6ea54f4edbd20febd13edb13b55074b5ee1a5a"
- name: get script - name: get script
url: /v1/rating/module_config/pyscripts/scripts/6c1b8a30-797f-4b7e-ad66-9879b79059fb url: /v1/rating/module_config/pyscripts/scripts/6c1b8a30-797f-4b7e-ad66-000000000001
status: 200 status: 200
response_json_paths: response_json_paths:
$.script_id: "6c1b8a30-797f-4b7e-ad66-9879b79059fb" $.script_id: "6c1b8a30-797f-4b7e-ad66-000000000001"
$.name: "policy1" $.name: "policy1"
$.data: "a = 0" $.data: "a = 0"
$.checksum: "4c612e33c0e40b7bf53cf95fad47dbfbeab9dd62f9bc181a9d1c6f40a087782223c23f793e747b0466b9e6998c6ea54f4edbd20febd13edb13b55074b5ee1a5a" $.checksum: "4c612e33c0e40b7bf53cf95fad47dbfbeab9dd62f9bc181a9d1c6f40a087782223c23f793e747b0466b9e6998c6ea54f4edbd20febd13edb13b55074b5ee1a5a"
- name: modify script - name: modify script
url: /v1/rating/module_config/pyscripts/scripts/6c1b8a30-797f-4b7e-ad66-9879b79059fb url: /v1/rating/module_config/pyscripts/scripts/6c1b8a30-797f-4b7e-ad66-000000000001
method: PUT method: PUT
request_headers: request_headers:
content-type: application/json content-type: application/json
@@ -90,12 +90,9 @@ tests:
data: data:
name: "policy1" name: "policy1"
data: "a = 1" data: "a = 1"
status: 201 status: 400
response_json_paths: response_strings:
$.script_id: "6c1b8a30-797f-4b7e-ad66-9879b79059fb" - "You are allowed to update only the attribute [end] as this rule is already running as it started on "
$.name: "policy1"
$.data: "a = 1"
$.checksum: "acb3095e24b13960484e75bce070e13e8a7728760517c31b34929a6f732841c652e9d2cc4d186bd02ef2e7495fab3c4850673bedc945cee7c74fea85eabd542c"
- name: modify unknown script - name: modify unknown script
url: /v1/rating/module_config/pyscripts/scripts/42 url: /v1/rating/module_config/pyscripts/scripts/42
@@ -111,19 +108,19 @@ tests:
- "No such script: None (UUID: 42)" - "No such script: None (UUID: 42)"
- name: check updated script - name: check updated script
url: /v1/rating/module_config/pyscripts/scripts/6c1b8a30-797f-4b7e-ad66-9879b79059fb url: /v1/rating/module_config/pyscripts/scripts/6c1b8a30-797f-4b7e-ad66-000000000001
request_headers: request_headers:
content-type: application/json content-type: application/json
x-roles: admin x-roles: admin
status: 200 status: 200
response_json_paths: response_json_paths:
$.script_id: "6c1b8a30-797f-4b7e-ad66-9879b79059fb" $.script_id: "6c1b8a30-797f-4b7e-ad66-000000000001"
$.name: "policy1" $.name: "policy1"
$.data: "a = 1" $.data: "a = 0"
$.checksum: "acb3095e24b13960484e75bce070e13e8a7728760517c31b34929a6f732841c652e9d2cc4d186bd02ef2e7495fab3c4850673bedc945cee7c74fea85eabd542c" $.checksum: "4c612e33c0e40b7bf53cf95fad47dbfbeab9dd62f9bc181a9d1c6f40a087782223c23f793e747b0466b9e6998c6ea54f4edbd20febd13edb13b55074b5ee1a5a"
- name: delete script - name: delete script
url: /v1/rating/module_config/pyscripts/scripts/6c1b8a30-797f-4b7e-ad66-9879b79059fb url: /v1/rating/module_config/pyscripts/scripts/6c1b8a30-797f-4b7e-ad66-000000000001
method: DELETE method: DELETE
status: 204 status: 204

View File

@@ -139,6 +139,8 @@ class HashMapRatingTest(tests.TestCase):
'flavor') 'flavor')
group_db = self._db_api.create_group('test_group') group_db = self._db_api.create_group('test_group')
self._db_api.create_mapping( self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='m1.tiny', value='m1.tiny',
cost='1.337', cost='1.337',
map_type='flat', map_type='flat',
@@ -156,6 +158,8 @@ class HashMapRatingTest(tests.TestCase):
'flavor') 'flavor')
group_db = self._db_api.create_group('test_group') group_db = self._db_api.create_group('test_group')
mapping_db = self._db_api.create_mapping( mapping_db = self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='m1.tiny', value='m1.tiny',
cost='1.337', cost='1.337',
map_type='flat', map_type='flat',
@@ -173,11 +177,15 @@ class HashMapRatingTest(tests.TestCase):
service_db = self._db_api.create_service('compute') service_db = self._db_api.create_service('compute')
group_db = self._db_api.create_group('test_group') group_db = self._db_api.create_group('test_group')
mapping_tiny = self._db_api.create_mapping( mapping_tiny = self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
cost='1.337', cost='1.337',
map_type='flat', map_type='flat',
service_id=service_db.service_id, service_id=service_db.service_id,
group_id=group_db.group_id) group_id=group_db.group_id)
self._db_api.create_mapping( self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
cost='42', cost='42',
map_type='flat', map_type='flat',
service_id=service_db.service_id) service_id=service_db.service_id)
@@ -191,18 +199,24 @@ class HashMapRatingTest(tests.TestCase):
'flavor') 'flavor')
group_db = self._db_api.create_group('test_group') group_db = self._db_api.create_group('test_group')
mapping_tiny = self._db_api.create_mapping( mapping_tiny = self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='m1.tiny', value='m1.tiny',
cost='1.337', cost='1.337',
map_type='flat', map_type='flat',
field_id=field_db.field_id, field_id=field_db.field_id,
group_id=group_db.group_id) group_id=group_db.group_id)
mapping_small = self._db_api.create_mapping( mapping_small = self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='m1.small', value='m1.small',
cost='3.1337', cost='3.1337',
map_type='flat', map_type='flat',
field_id=field_db.field_id, field_id=field_db.field_id,
group_id=group_db.group_id) group_id=group_db.group_id)
self._db_api.create_mapping( self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='m1.large', value='m1.large',
cost='42', cost='42',
map_type='flat', map_type='flat',
@@ -219,18 +233,24 @@ class HashMapRatingTest(tests.TestCase):
'flavor') 'flavor')
group_db = self._db_api.create_group('test_group') group_db = self._db_api.create_group('test_group')
self._db_api.create_mapping( self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='m1.tiny', value='m1.tiny',
cost='1.337', cost='1.337',
map_type='flat', map_type='flat',
field_id=field_db.field_id, field_id=field_db.field_id,
group_id=group_db.group_id) group_id=group_db.group_id)
self._db_api.create_mapping( self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='m1.small', value='m1.small',
cost='3.1337', cost='3.1337',
map_type='flat', map_type='flat',
field_id=field_db.field_id, field_id=field_db.field_id,
group_id=group_db.group_id) group_id=group_db.group_id)
mapping_no_group = self._db_api.create_mapping( mapping_no_group = self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='m1.large', value='m1.large',
cost='42', cost='42',
map_type='flat', map_type='flat',
@@ -324,6 +344,8 @@ class HashMapRatingTest(tests.TestCase):
field_db = self._db_api.create_field(service_db.service_id, field_db = self._db_api.create_field(service_db.service_id,
'flavor') 'flavor')
mapping_db = self._db_api.create_mapping( mapping_db = self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='m1.tiny', value='m1.tiny',
cost='1.337', cost='1.337',
map_type='flat', map_type='flat',
@@ -336,6 +358,8 @@ class HashMapRatingTest(tests.TestCase):
field_db = self._db_api.create_field(service_db.service_id, field_db = self._db_api.create_field(service_db.service_id,
'flavor') 'flavor')
mapping_db = self._db_api.create_mapping( mapping_db = self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='m1.tiny', value='m1.tiny',
cost='1.337', cost='1.337',
map_type='flat', map_type='flat',
@@ -350,6 +374,8 @@ class HashMapRatingTest(tests.TestCase):
def test_list_mappings_from_services(self): def test_list_mappings_from_services(self):
service_db = self._db_api.create_service('compute') service_db = self._db_api.create_service('compute')
mapping_db = self._db_api.create_mapping( mapping_db = self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
cost='1.337', cost='1.337',
map_type='flat', map_type='flat',
service_id=service_db.service_id) service_id=service_db.service_id)
@@ -362,6 +388,8 @@ class HashMapRatingTest(tests.TestCase):
field_db = self._db_api.create_field(service_db.service_id, field_db = self._db_api.create_field(service_db.service_id,
'flavor') 'flavor')
mapping_db = self._db_api.create_mapping( mapping_db = self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='m1.tiny', value='m1.tiny',
cost='1.337', cost='1.337',
map_type='flat', map_type='flat',
@@ -398,6 +426,8 @@ class HashMapRatingTest(tests.TestCase):
field_db = self._db_api.create_field(service_db.service_id, field_db = self._db_api.create_field(service_db.service_id,
'flavor') 'flavor')
mapping_db = self._db_api.create_mapping( mapping_db = self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='m1.tiny', value='m1.tiny',
cost='1.337', cost='1.337',
map_type='flat', map_type='flat',
@@ -414,6 +444,8 @@ class HashMapRatingTest(tests.TestCase):
field_db = self._db_api.create_field(service_db.service_id, field_db = self._db_api.create_field(service_db.service_id,
'flavor') 'flavor')
mapping_db = self._db_api.create_mapping( mapping_db = self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='m1.tiny', value='m1.tiny',
cost='1.337', cost='1.337',
map_type='flat', map_type='flat',
@@ -433,6 +465,8 @@ class HashMapRatingTest(tests.TestCase):
field_db = self._db_api.create_field(service_db.service_id, field_db = self._db_api.create_field(service_db.service_id,
'flavor') 'flavor')
mapping_db = self._db_api.create_mapping( mapping_db = self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='m1.tiny', value='m1.tiny',
cost='1.337', cost='1.337',
map_type='flat', map_type='flat',
@@ -447,6 +481,8 @@ class HashMapRatingTest(tests.TestCase):
service_db.service_id, service_db.service_id,
'flavor') 'flavor')
mapping_db = self._db_api.create_mapping( mapping_db = self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='m1.tiny', value='m1.tiny',
cost='1.337', cost='1.337',
map_type='flat', map_type='flat',
@@ -464,12 +500,16 @@ class HashMapRatingTest(tests.TestCase):
service_db.service_id, service_db.service_id,
'flavor') 'flavor')
mapping_db = self._db_api.create_mapping( mapping_db = self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='m1.tiny', value='m1.tiny',
cost='1.337', cost='1.337',
map_type='flat', map_type='flat',
field_id=field_db.field_id, field_id=field_db.field_id,
tenant_id=self._tenant_id) tenant_id=self._tenant_id)
self._db_api.create_mapping( self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='m1.small', value='m1.small',
cost='1.337', cost='1.337',
map_type='flat', map_type='flat',
@@ -485,11 +525,15 @@ class HashMapRatingTest(tests.TestCase):
service_db.service_id, service_db.service_id,
'flavor') 'flavor')
mapping_db = self._db_api.create_mapping( mapping_db = self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='m1.tiny', value='m1.tiny',
cost='1.337', cost='1.337',
map_type='flat', map_type='flat',
field_id=field_db.field_id) field_id=field_db.field_id)
self._db_api.create_mapping( self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='m1.small', value='m1.small',
cost='1.337', cost='1.337',
map_type='flat', map_type='flat',
@@ -711,17 +755,23 @@ class HashMapRatingTest(tests.TestCase):
group_db = self._db_api.create_group('test_group') group_db = self._db_api.create_group('test_group')
mapping_list.append( mapping_list.append(
self._db_api.create_mapping( self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
cost='1.42', cost='1.42',
map_type='rate', map_type='rate',
service_id=service_db.service_id)) service_id=service_db.service_id))
mapping_list.append( mapping_list.append(
self._db_api.create_mapping( self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='m1.tiny', value='m1.tiny',
cost='1.337', cost='1.337',
map_type='flat', map_type='flat',
field_id=flavor_field.field_id)) field_id=flavor_field.field_id))
mapping_list.append( mapping_list.append(
self._db_api.create_mapping( self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='m1.large', value='m1.large',
cost='13.37', cost='13.37',
map_type='rate', map_type='rate',
@@ -730,6 +780,8 @@ class HashMapRatingTest(tests.TestCase):
# Per tenant override # Per tenant override
mapping_list.append( mapping_list.append(
self._db_api.create_mapping( self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='m1.tiny', value='m1.tiny',
cost='2', cost='2',
map_type='flat', map_type='flat',
@@ -808,12 +860,16 @@ class HashMapRatingTest(tests.TestCase):
group_db = self._db_api.create_group('test_group') group_db = self._db_api.create_group('test_group')
mapping_list.append( mapping_list.append(
self._db_api.create_mapping( self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='m1.tiny', value='m1.tiny',
cost='1.337', cost='1.337',
map_type='flat', map_type='flat',
field_id=field_db.field_id)) field_id=field_db.field_id))
mapping_list.append( mapping_list.append(
self._db_api.create_mapping( self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='m1.large', value='m1.large',
cost='13.37', cost='13.37',
map_type='rate', map_type='rate',
@@ -859,11 +915,17 @@ class HashMapRatingTest(tests.TestCase):
service_db = self._db_api.create_service('compute') service_db = self._db_api.create_service('compute')
group_db = self._db_api.create_group('test_group') group_db = self._db_api.create_group('test_group')
self._db_api.create_mapping( self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
cost='1.337', cost='1.337',
start=datetime.datetime(2014, 1, 1),
map_type='flat', map_type='flat',
service_id=service_db.service_id, service_id=service_db.service_id,
group_id=group_db.group_id) group_id=group_db.group_id)
self._db_api.create_mapping( self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
start=datetime.datetime(2014, 1, 3),
cost='1.42', cost='1.42',
map_type='flat', map_type='flat',
service_id=service_db.service_id) service_id=service_db.service_id)
@@ -899,18 +961,24 @@ class HashMapRatingTest(tests.TestCase):
'image_id') 'image_id')
group_db = self._db_api.create_group('test_group') group_db = self._db_api.create_group('test_group')
self._db_api.create_mapping( self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='m1.nano', value='m1.nano',
cost='1.337', cost='1.337',
map_type='flat', map_type='flat',
field_id=flavor_field.field_id, field_id=flavor_field.field_id,
group_id=group_db.group_id) group_id=group_db.group_id)
self._db_api.create_mapping( self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='a41fba37-2429-4f15-aa00-b5bc4bf557bf', value='a41fba37-2429-4f15-aa00-b5bc4bf557bf',
cost='1.10', cost='1.10',
map_type='rate', map_type='rate',
field_id=image_field.field_id, field_id=image_field.field_id,
group_id=group_db.group_id) group_id=group_db.group_id)
self._db_api.create_mapping( self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='m1.tiny', value='m1.tiny',
cost='1.42', cost='1.42',
map_type='flat', map_type='flat',
@@ -943,6 +1011,8 @@ class HashMapRatingTest(tests.TestCase):
flavor_field = self._db_api.create_field(service_db.service_id, flavor_field = self._db_api.create_field(service_db.service_id,
'flavor') 'flavor')
self._db_api.create_mapping( self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='non-existent', value='non-existent',
cost='1.337', cost='1.337',
map_type='flat', map_type='flat',
@@ -1148,22 +1218,30 @@ class HashMapRatingTest(tests.TestCase):
group_db = self._db_api.create_group('test_group') group_db = self._db_api.create_group('test_group')
second_group_db = self._db_api.create_group('second_test_group') second_group_db = self._db_api.create_group('second_test_group')
self._db_api.create_mapping( self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
cost='1.00', cost='1.00',
map_type='flat', map_type='flat',
service_id=service_db.service_id) service_id=service_db.service_id)
self._db_api.create_mapping( self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='m1.nano', value='m1.nano',
cost='1.337', cost='1.337',
map_type='flat', map_type='flat',
field_id=flavor_db.field_id, field_id=flavor_db.field_id,
group_id=group_db.group_id) group_id=group_db.group_id)
self._db_api.create_mapping( self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='m1.tiny', value='m1.tiny',
cost='1.42', cost='1.42',
map_type='flat', map_type='flat',
field_id=flavor_db.field_id, field_id=flavor_db.field_id,
group_id=group_db.group_id) group_id=group_db.group_id)
self._db_api.create_mapping( self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='8', value='8',
cost='16.0', cost='16.0',
map_type='flat', map_type='flat',
@@ -1172,6 +1250,8 @@ class HashMapRatingTest(tests.TestCase):
image_db = self._db_api.create_field(service_db.service_id, image_db = self._db_api.create_field(service_db.service_id,
'image_id') 'image_id')
self._db_api.create_mapping( self._db_api.create_mapping(
name=uuidutils.generate_uuid(False),
created_by='1',
value='a41fba37-2429-4f15-aa00-b5bc4bf557bf', value='a41fba37-2429-4f15-aa00-b5bc4bf557bf',
cost='1.10', cost='1.10',
map_type='rate', map_type='rate',

View File

@@ -236,10 +236,17 @@ class WorkerTest(tests.TestCase):
def setUp(self): def setUp(self):
super(WorkerTest, self).setUp() super(WorkerTest, self).setUp()
patcher_state_manager_get_state = mock.patch(
"cloudkitty.storage_state.StateManager"
".get_last_processed_timestamp")
patcher_state_manager_set_state = mock.patch( patcher_state_manager_set_state = mock.patch(
"cloudkitty.storage_state." "cloudkitty.storage_state."
"StateManager.set_last_processed_timestamp") "StateManager.set_last_processed_timestamp")
self.addCleanup(patcher_state_manager_set_state.stop) self.addCleanup(patcher_state_manager_set_state.stop)
self.state_manager_get_state_mock = \
patcher_state_manager_get_state.start()
self.state_manager_get_state_mock.return_value = datetime.datetime(
2019, 7, 16, 8, 55, 1)
self.state_manager_set_state_mock = \ self.state_manager_set_state_mock = \
patcher_state_manager_set_state.start() patcher_state_manager_set_state.start()
@@ -882,6 +889,13 @@ class ReprocessingWorkerTest(tests.TestCase):
def setUp(self): def setUp(self):
super(ReprocessingWorkerTest, self).setUp() super(ReprocessingWorkerTest, self).setUp()
patcher_state_manager_get_state = mock.patch(
"cloudkitty.storage_state.StateManager"
".get_last_processed_timestamp")
self.state_manager_get_state_mock = \
patcher_state_manager_get_state.start()
self.state_manager_get_state_mock.return_value = datetime.datetime(
2019, 7, 16, 8, 55, 1)
patcher_reprocessing_scheduler_db_get_from_db = mock.patch( patcher_reprocessing_scheduler_db_get_from_db = mock.patch(
"cloudkitty.storage_state.ReprocessingSchedulerDb.get_from_db") "cloudkitty.storage_state.ReprocessingSchedulerDb.get_from_db")
self.addCleanup(patcher_reprocessing_scheduler_db_get_from_db.stop) self.addCleanup(patcher_reprocessing_scheduler_db_get_from_db.stop)

View File

@@ -214,48 +214,56 @@ class PyScriptsRatingTest(tests.TestCase):
@mock.patch.object(uuidutils, 'generate_uuid', @mock.patch.object(uuidutils, 'generate_uuid',
return_value=FAKE_UUID) return_value=FAKE_UUID)
def test_create_script(self, patch_generate_uuid): def test_create_script(self, patch_generate_uuid):
self._db_api.create_script('policy1', TEST_CODE1) self._db_api.create_script('policy1', TEST_CODE1,
created_by='')
scripts = self._db_api.list_scripts() scripts = self._db_api.list_scripts()
self.assertEqual([FAKE_UUID], scripts) self.assertEqual([FAKE_UUID], scripts)
patch_generate_uuid.assert_called_once_with() patch_generate_uuid.assert_called_once_with()
def test_create_duplicate_script(self): def test_create_duplicate_script(self):
self._db_api.create_script('policy1', TEST_CODE1) self._db_api.create_script('policy1', TEST_CODE1,
created_by='')
self.assertRaises(api.ScriptAlreadyExists, self.assertRaises(api.ScriptAlreadyExists,
self._db_api.create_script, self._db_api.create_script,
'policy1', 'policy1',
TEST_CODE1) TEST_CODE1, created_by='')
def test_get_script_by_uuid(self): def test_get_script_by_uuid(self):
expected = self._db_api.create_script('policy1', TEST_CODE1) expected = self._db_api.create_script('policy1', TEST_CODE1,
created_by='')
actual = self._db_api.get_script(uuid=expected.script_id) actual = self._db_api.get_script(uuid=expected.script_id)
self.assertEqual(expected.data, actual.data) self.assertEqual(expected.data, actual.data)
def test_get_script_by_name(self): def test_get_script_by_name(self):
expected = self._db_api.create_script('policy1', TEST_CODE1) expected = self._db_api.create_script('policy1', TEST_CODE1,
created_by='')
actual = self._db_api.get_script(expected.name) actual = self._db_api.get_script(expected.name)
self.assertEqual(expected.data, actual.data) self.assertEqual(expected.data, actual.data)
def test_get_script_without_parameters(self): def test_get_script_without_parameters(self):
self._db_api.create_script('policy1', TEST_CODE1) self._db_api.create_script('policy1', TEST_CODE1,
created_by='')
self.assertRaises( self.assertRaises(
ValueError, ValueError,
self._db_api.get_script) self._db_api.get_script)
def test_delete_script_by_name(self): def test_delete_script_by_name(self):
self._db_api.create_script('policy1', TEST_CODE1) self._db_api.create_script('policy1', TEST_CODE1,
created_by='')
self._db_api.delete_script('policy1') self._db_api.delete_script('policy1')
scripts = self._db_api.list_scripts() scripts = self._db_api.list_scripts()
self.assertEqual([], scripts) self.assertEqual([], scripts)
def test_delete_script_by_uuid(self): def test_delete_script_by_uuid(self):
script_db = self._db_api.create_script('policy1', TEST_CODE1) script_db = self._db_api.create_script('policy1', TEST_CODE1,
created_by='')
self._db_api.delete_script(uuid=script_db.script_id) self._db_api.delete_script(uuid=script_db.script_id)
scripts = self._db_api.list_scripts() scripts = self._db_api.list_scripts()
self.assertEqual([], scripts) self.assertEqual([], scripts)
def test_delete_script_without_parameters(self): def test_delete_script_without_parameters(self):
self._db_api.create_script('policy1', TEST_CODE1) self._db_api.create_script('policy1', TEST_CODE1,
created_by='')
self.assertRaises( self.assertRaises(
ValueError, ValueError,
self._db_api.delete_script) self._db_api.delete_script)
@@ -272,13 +280,15 @@ class PyScriptsRatingTest(tests.TestCase):
uuid='6e8de9fc-ee17-4b60-b81a-c9320e994e76') uuid='6e8de9fc-ee17-4b60-b81a-c9320e994e76')
def test_update_script(self): def test_update_script(self):
script_db = self._db_api.create_script('policy1', TEST_CODE1) script_db = self._db_api.create_script('policy1', TEST_CODE1,
created_by='')
self._db_api.update_script(script_db.script_id, data=TEST_CODE2) self._db_api.update_script(script_db.script_id, data=TEST_CODE2)
actual = self._db_api.get_script(uuid=script_db.script_id) actual = self._db_api.get_script(uuid=script_db.script_id)
self.assertEqual(TEST_CODE2, actual.data) self.assertEqual(TEST_CODE2, actual.data)
def test_update_script_uuid_disabled(self): def test_update_script_uuid_disabled(self):
expected = self._db_api.create_script('policy1', TEST_CODE1) expected = self._db_api.create_script('policy1', TEST_CODE1,
created_by='')
self._db_api.update_script(expected.script_id, self._db_api.update_script(expected.script_id,
data=TEST_CODE2, data=TEST_CODE2,
script_id='42') script_id='42')
@@ -286,7 +296,8 @@ class PyScriptsRatingTest(tests.TestCase):
self.assertEqual(expected.script_id, actual.script_id) self.assertEqual(expected.script_id, actual.script_id)
def test_update_script_unknown_attribute(self): def test_update_script_unknown_attribute(self):
expected = self._db_api.create_script('policy1', TEST_CODE1) expected = self._db_api.create_script('policy1', TEST_CODE1,
created_by='')
self.assertRaises( self.assertRaises(
ValueError, ValueError,
self._db_api.update_script, self._db_api.update_script,
@@ -294,7 +305,8 @@ class PyScriptsRatingTest(tests.TestCase):
nonexistent=1) nonexistent=1)
def test_empty_script_update(self): def test_empty_script_update(self):
expected = self._db_api.create_script('policy1', TEST_CODE1) expected = self._db_api.create_script('policy1', TEST_CODE1,
created_by='')
self.assertRaises( self.assertRaises(
ValueError, ValueError,
self._db_api.update_script, self._db_api.update_script,
@@ -303,19 +315,22 @@ class PyScriptsRatingTest(tests.TestCase):
# Storage tests # Storage tests
def test_compressed_data(self): def test_compressed_data(self):
data = TEST_CODE1 data = TEST_CODE1
self._db_api.create_script('policy1', data) self._db_api.create_script('policy1', data,
created_by='')
script = self._db_api.get_script('policy1') script = self._db_api.get_script('policy1')
expected = zlib.compress(data) expected = zlib.compress(data)
self.assertEqual(expected, script._data) self.assertEqual(expected, script._data)
def test_on_the_fly_decompression(self): def test_on_the_fly_decompression(self):
data = TEST_CODE1 data = TEST_CODE1
self._db_api.create_script('policy1', data) self._db_api.create_script('policy1', data,
created_by='')
script = self._db_api.get_script('policy1') script = self._db_api.get_script('policy1')
self.assertEqual(data, script.data) self.assertEqual(data, script.data)
def test_script_repr(self): def test_script_repr(self):
script_db = self._db_api.create_script('policy1', TEST_CODE1) script_db = self._db_api.create_script('policy1', TEST_CODE1,
created_by='')
self.assertEqual( self.assertEqual(
'<PyScripts Script[{uuid}]: name={name}>'.format( '<PyScripts Script[{uuid}]: name={name}>'.format(
uuid=script_db.script_id, uuid=script_db.script_id,
@@ -324,12 +339,14 @@ class PyScriptsRatingTest(tests.TestCase):
# Checksum tests # Checksum tests
def test_validate_checksum(self): def test_validate_checksum(self):
self._db_api.create_script('policy1', TEST_CODE1) self._db_api.create_script('policy1', TEST_CODE1,
created_by='')
script = self._db_api.get_script('policy1') script = self._db_api.get_script('policy1')
self.assertEqual(TEST_CODE1_CHECKSUM, script.checksum) self.assertEqual(TEST_CODE1_CHECKSUM, script.checksum)
def test_read_only_checksum(self): def test_read_only_checksum(self):
self._db_api.create_script('policy1', TEST_CODE1) self._db_api.create_script('policy1', TEST_CODE1,
created_by='')
script = self._db_api.get_script('policy1') script = self._db_api.get_script('policy1')
self.assertRaises( self.assertRaises(
AttributeError, AttributeError,
@@ -340,22 +357,27 @@ class PyScriptsRatingTest(tests.TestCase):
'7d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e') '7d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e')
def test_update_checksum(self): def test_update_checksum(self):
self._db_api.create_script('policy1', TEST_CODE1) self._db_api.create_script('policy1', TEST_CODE1,
created_by='')
script = self._db_api.get_script('policy1') script = self._db_api.get_script('policy1')
script = self._db_api.update_script(script.script_id, data=TEST_CODE2) script = self._db_api.update_script(script.script_id, data=TEST_CODE2)
self.assertEqual(TEST_CODE2_CHECKSUM, script.checksum) self.assertEqual(TEST_CODE2_CHECKSUM, script.checksum)
# Code exec tests # Code exec tests
def test_load_scripts(self): def test_load_scripts(self):
policy1_db = self._db_api.create_script('policy1', TEST_CODE1) policy1_db = self._db_api.create_script('policy1', TEST_CODE1,
policy2_db = self._db_api.create_script('policy2', TEST_CODE2) created_by='')
policy2_db = self._db_api.create_script('policy2', TEST_CODE2,
created_by='')
self._pyscripts.load_scripts_in_memory() self._pyscripts.load_scripts_in_memory()
self.assertIn(policy1_db.script_id, self._pyscripts._scripts) self.assertIn(policy1_db.script_id, self._pyscripts._scripts)
self.assertIn(policy2_db.script_id, self._pyscripts._scripts) self.assertIn(policy2_db.script_id, self._pyscripts._scripts)
def test_purge_old_scripts(self): def test_purge_old_scripts(self):
policy1_db = self._db_api.create_script('policy1', TEST_CODE1) policy1_db = self._db_api.create_script('policy1', TEST_CODE1,
policy2_db = self._db_api.create_script('policy2', TEST_CODE2) created_by='')
policy2_db = self._db_api.create_script('policy2', TEST_CODE2,
created_by='')
self._pyscripts.reload_config() self._pyscripts.reload_config()
self.assertIn(policy1_db.script_id, self._pyscripts._scripts) self.assertIn(policy1_db.script_id, self._pyscripts._scripts)
self.assertIn(policy2_db.script_id, self._pyscripts._scripts) self.assertIn(policy2_db.script_id, self._pyscripts._scripts)
@@ -367,7 +389,8 @@ class PyScriptsRatingTest(tests.TestCase):
@mock.patch.object(uuidutils, 'generate_uuid', @mock.patch.object(uuidutils, 'generate_uuid',
return_value=FAKE_UUID) return_value=FAKE_UUID)
def test_valid_script_data_loaded(self, patch_generate_uuid): def test_valid_script_data_loaded(self, patch_generate_uuid):
self._db_api.create_script('policy1', TEST_CODE1) self._db_api.create_script('policy1', TEST_CODE1,
created_by='')
self._pyscripts.load_scripts_in_memory() self._pyscripts.load_scripts_in_memory()
expected = { expected = {
FAKE_UUID: { FAKE_UUID: {
@@ -384,7 +407,8 @@ class PyScriptsRatingTest(tests.TestCase):
self.assertEqual(1, context['a']) self.assertEqual(1, context['a'])
def test_update_script_on_checksum_change(self): def test_update_script_on_checksum_change(self):
policy_db = self._db_api.create_script('policy1', TEST_CODE1) policy_db = self._db_api.create_script('policy1', TEST_CODE1,
created_by='')
self._pyscripts.reload_config() self._pyscripts.reload_config()
self._db_api.update_script(policy_db.script_id, data=TEST_CODE2) self._db_api.update_script(policy_db.script_id, data=TEST_CODE2)
self._pyscripts.reload_config() self._pyscripts.reload_config()
@@ -393,8 +417,10 @@ class PyScriptsRatingTest(tests.TestCase):
self._pyscripts._scripts[policy_db.script_id]['checksum']) self._pyscripts._scripts[policy_db.script_id]['checksum'])
def test_exec_code_isolation(self): def test_exec_code_isolation(self):
self._db_api.create_script('policy1', TEST_CODE1) self._db_api.create_script('policy1', TEST_CODE1,
self._db_api.create_script('policy2', TEST_CODE3) created_by='')
self._db_api.create_script('policy2', TEST_CODE3,
created_by='')
self._pyscripts.reload_config() self._pyscripts.reload_config()
self.assertEqual(2, len(self._pyscripts._scripts)) self.assertEqual(2, len(self._pyscripts._scripts))
@@ -403,7 +429,8 @@ class PyScriptsRatingTest(tests.TestCase):
# Processing # Processing
def test_process_rating(self): def test_process_rating(self):
self._db_api.create_script('policy1', COMPLEX_POLICY1) self._db_api.create_script('policy1', COMPLEX_POLICY1,
created_by='')
self._pyscripts.reload_config() self._pyscripts.reload_config()
data_output = self._pyscripts.process(self.dataframe_for_tests) data_output = self._pyscripts.process(self.dataframe_for_tests)
@@ -413,21 +440,22 @@ class PyScriptsRatingTest(tests.TestCase):
for point in dict_output['usage']['compute']: for point in dict_output['usage']['compute']:
if point['groupby'].get('flavor') == 'm1.nano': if point['groupby'].get('flavor') == 'm1.nano':
self.assertEqual( self.assertEqual(
decimal.Decimal('2'), point['rating']['price']) decimal.Decimal('2'), point['rating']['price'])
else: else:
self.assertEqual( self.assertEqual(
decimal.Decimal('0'), point['rating']['price']) decimal.Decimal('0'), point['rating']['price'])
for point in dict_output['usage']['instance_status']: for point in dict_output['usage']['instance_status']:
if point['groupby'].get('flavor') == 'm1.ultra': if point['groupby'].get('flavor') == 'm1.ultra':
self.assertEqual( self.assertEqual(
decimal.Decimal('96'), point['rating']['price']) decimal.Decimal('96'), point['rating']['price'])
else: else:
self.assertEqual( self.assertEqual(
decimal.Decimal('0'), point['rating']['price']) decimal.Decimal('0'), point['rating']['price'])
# Processing # Processing
def test_process_rating_with_documentation_rules(self): def test_process_rating_with_documentation_rules(self):
self._db_api.create_script('policy1', DOCUMENTATION_RATING_POLICY) self._db_api.create_script('policy1', DOCUMENTATION_RATING_POLICY,
created_by='')
self._pyscripts.reload_config() self._pyscripts.reload_config()
dataframe_for_tests = copy.deepcopy(self.dataframe_for_tests) dataframe_for_tests = copy.deepcopy(self.dataframe_for_tests)
@@ -456,7 +484,7 @@ class PyScriptsRatingTest(tests.TestCase):
for point in dict_output['usage']['instance_status']: for point in dict_output['usage']['instance_status']:
if point['groupby'].get('flavor') == 'm1.ultra': if point['groupby'].get('flavor') == 'm1.ultra':
self.assertEqual( self.assertEqual(
decimal.Decimal('0'), point['rating']['price']) decimal.Decimal('0'), point['rating']['price'])
else: else:
self.assertEqual( self.assertEqual(
decimal.Decimal('0'), point['rating']['price']) decimal.Decimal('0'), point['rating']['price'])

View File

@@ -69,7 +69,17 @@ Rating rules
Rating rules are the expressions used to create a charge (assign a value to Rating rules are the expressions used to create a charge (assign a value to
a computing resource consumption). Rating rules can be created with a computing resource consumption). Rating rules can be created with
PyScripts or with the use of fields, services and groups with hashmap PyScripts or with the use of fields, services and groups with hashmap
rating rules. rating rules. You can define ``start`` and ``end`` dates for the rating rules
(PyScripts and hashmap mappings), which define the period they will be valid
and applied in the rating process. If neither ``start`` nor ``end`` dates are
defined, they will be set as the rule's creation date and ``None``
respectively. A ``None`` ``end`` date means the rating rule will last
indefinitely. The ``start`` date cannot be set to a past date; if you really
need to do it, you can force it using a ``force`` flag when creating the rule.
Once the rule starts running (rule's ``start`` < current date), you will not be
able to update the rule anymore; in this case, if you need to change the
rule's value, you will need to delete it and create it again with the new
value.
If we have a hashmap mapping configuration for a service and another If we have a hashmap mapping configuration for a service and another
hashmap map configuration for a field that belongs to the same service, hashmap map configuration for a field that belongs to the same service,

View File

@@ -242,15 +242,18 @@ instance to a cost of 0.01:
--value 93195dd4-bbf3-4b13-929d-8293ae72e056 \ --value 93195dd4-bbf3-4b13-929d-8293ae72e056 \
-g 9a2ff37d-be86-4642-8b7d-567bace61f06 \ -g 9a2ff37d-be86-4642-8b7d-567bace61f06 \
-t flat -t flat
+--------------------------------------+--------------------------------------+------------+------+--------------------------------------+------------+--------------------------------------+------------+ +--------------------------------------+--------------------------------------+---------------------+---------------------+------+------+-------------+---------+----------------------------------+------------+------------+------------+------+--------------------------------------+------------+--------------------------------------+------------+
| Mapping ID | Value | Cost | Type | Field ID | Service ID | Group ID | Project ID | | Mapping ID | Value | Created At | Start | End | Name | Description | Deleted | Created By | Updated By | Deleted By | Cost | Type | Field ID | Service ID | Group ID | Project ID |
+--------------------------------------+--------------------------------------+------------+------+--------------------------------------+------------+--------------------------------------+------------+ +--------------------------------------+--------------------------------------+---------------------+---------------------+------+------+-------------+---------+----------------------------------+------------+------------+------------+------+--------------------------------------+------------+--------------------------------------+------------+
| 9c2418dc-99d3-44b6-8fdf-e9fa02f3ceb5 | 93195dd4-bbf3-4b13-929d-8293ae72e056 | 0.01000000 | flat | 18aa50b6-6da8-4c47-8a1f-43236b971625 | None | 9a2ff37d-be86-4642-8b7d-567bace61f06 | None | | 9c2418dc-99d3-44b6-8fdf-e9fa02f3ceb5 | 93195dd4-bbf3-4b13-929d-8293ae72e056 | 2023-01-01T10:00:00 | 2023-01-01T10:00:00 | None | None | None | None | 7977999e2e2511e6a8b2df30b233ffcb | None | None | 0.01000000 | flat | 18aa50b6-6da8-4c47-8a1f-43236b971625 | None | 9a2ff37d-be86-4642-8b7d-567bace61f06 | None |
+--------------------------------------+--------------------------------------+------------+------+--------------------------------------+------------+--------------------------------------+------------+ +--------------------------------------+--------------------------------------+---------------------+---------------------+------+------+-------------+---------+----------------------------------+------------+------------+------------+------+--------------------------------------+------------+--------------------------------------+------------+
In this example every machine in any project with the flavor m1.tiny will be In this example every machine in any project with the flavor m1.tiny will be
rated 0.01 per collection period. rated 0.01 for the collection period from 2023-01-01T10:00:00 onwards. To use
a custom period (``start`` and ``end``) during which the hashmap mapping will be
valid, you can use the parameters ``--start`` and ``--end``. To use a ``start`` value in
the past, use the ``--force`` parameter.
Volume per GiB with discount Volume per GiB with discount
@@ -295,11 +298,11 @@ Now let's setup the price per gigabyte:
$ cloudkitty hashmap mapping create 0.001 \ $ cloudkitty hashmap mapping create 0.001 \
-s 74ad7e4e-9cae-45a8-884b-368a92803afe \ -s 74ad7e4e-9cae-45a8-884b-368a92803afe \
-t flat -g 9736bbc0-8888-4700-96fc-58db5fded493 -t flat -g 9736bbc0-8888-4700-96fc-58db5fded493
+--------------------------------------+-------+------------+------+----------+--------------------------------------+--------------------------------------+------------+ +--------------------------------------+-------+---------------------+---------------------+------+------+-------------+---------+----------------------------------+------------+------------+------------+------+----------+--------------------------------------+--------------------------------------+------------+
| Mapping ID | Value | Cost | Type | Field ID | Service ID | Group ID | Project ID | | Mapping ID | Value | Created At | Start | End | Name | Description | Deleted | Created By | Updated By | Deleted By | Cost | Type | Field ID | Service ID | Group ID | Project ID |
+--------------------------------------+-------+------------+------+----------+--------------------------------------+--------------------------------------+------------+ +--------------------------------------+-------+---------------------+---------------------+------+------+-------------+---------+----------------------------------+------------+------------+------------+------+----------+--------------------------------------+--------------------------------------+------------+
| 09e36b13-ce89-4bd0-bbf1-1b80577031e8 | None | 0.00100000 | flat | None | 74ad7e4e-9cae-45a8-884b-368a92803afe | 9736bbc0-8888-4700-96fc-58db5fded493 | None | | 09e36b13-ce89-4bd0-bbf1-1b80577031e8 | None | 2023-01-01T10:00:00 | 2023-01-01T10:00:00 | None | None | None | None | 7977999e2e2511e6a8b2df30b233ffcb | None | None | 0.00100000 | flat | None | 74ad7e4e-9cae-45a8-884b-368a92803afe | 9736bbc0-8888-4700-96fc-58db5fded493 | None |
+--------------------------------------+-------+------------+------+----------+--------------------------------------+--------------------------------------+------------+ +--------------------------------------+-------+---------------------+---------------------+------+------+-------------+---------+----------------------------------+------------+------------+------------+------+----------+--------------------------------------+--------------------------------------+------------+
We have the basic price per gigabyte but we now want to apply a discount on huge We have the basic price per gigabyte but we now want to apply a discount on huge

View File

@@ -114,21 +114,25 @@ To use your script for rating, you will need to enable the pyscripts module
Adding the script to CloudKitty Adding the script to CloudKitty
------------------------------- -------------------------------
Create the script and specify its name. Create the script and specify its name, description, start and end dates.
If the ``start`` and ``end`` are not given, the ``start`` will be set as the
creation date and the ``end`` as ``None``. The script is valid from the
``start`` time until the ``end`` time; if the ``end`` time is ``None``, the
script is endless.
.. code-block:: console .. code-block:: console
$ cloudkitty pyscript create my_awesome_script script.py $ cloudkitty pyscript create my_awesome_script script.py
+-------------------+--------------------------------------+------------------------------------------+---------------------------------------+ +-------------------+--------------------------------------+---------------------+---------------------+------+-------------+---------+----------------------------------+------------+------------+------------------------------------------+---------------------------------------+
| Name | Script ID | Checksum | Data | | Name | Script ID | Created At | Start | End | Description | Deleted | Created By | Updated By | Deleted By | Checksum | Data |
+-------------------+--------------------------------------+------------------------------------------+---------------------------------------+ +-------------------+--------------------------------------+---------------------+---------------------+------+-------------+---------+----------------------------------+------------+------------+------------------------------------------+---------------------------------------+
| my_awesome_script | 78e1955a-4e7e-47e3-843c-524d8e6ad4c4 | 49e889018eb86b2035437ebb69093c0b6379f18c | from __future__ import print_function | | my_awesome_script | 78e1955a-4e7e-47e3-843c-524d8e6ad4c4 | 2023-01-01T10:00:00 | 2023-01-01T10:00:00 | None | None | None | 7977999e2e2511e6a8b2df30b233ffcb | None | None | 49e889018eb86b2035437ebb69093c0b6379f18c | from __future__ import print_function |
| | | | from cloudkitty import rating | | | | | | | | | | | | | from cloudkitty import rating |
| | | | | | | | | | | | | | | | | |
| | | | import decimal | | | | | | | | | | | | | import decimal |
| | | | | | | | | | | | | | | | | |
| | | | {...} | | | | | | | | | | | | | {...} |
| | | | | | | | | | | | | | | | | |
| | | | data = process(data) | | | | | | | | | | | | | data = process(data) |
| | | | | | | | | | | | | | | | | |
+-------------------+--------------------------------------+------------------------------------------+---------------------------------------+ +-------------------+--------------------------------------+---------------------+---------------------+------+-------------+---------+----------------------------------+------------+------------+------------------------------------------+---------------------------------------+

View File

@@ -0,0 +1,21 @@
---
features:
- |
Add new fields to specify the validity period of rating rules for modules
``hashmap`` and ``pyscripts``. These new fields also improve the audit
mechanism for rating rules changes and deletion, allowing users to know
when the rules were changed or removed (marked as deleted) and by whom.
upgrade:
- |
New rules for both ``hashmap`` and ``pyscript`` modules will no longer be
reprocessed for past periods; by default, they are valid from the moment
they are created. To allow new rules to be valid for past periods,
operators will need to specify a ``start_date`` in the past and pass the
query parameter ``force=true`` when creating the rating rule, then the
rule will be valid and used from the defined start date. Updating rating
rules for both ``hashmap`` and ``pyscript`` modules will no longer be
allowed for rules where the ``start_date`` is in the past; to do so, you
will need to delete and create a new rating rule with the desired values,
the only value you can update for rules that have already started is the
``end_date``.