Merge "Add monasca-log-api source code"

This commit is contained in:
Zuul
2019-07-16 09:35:55 +00:00
committed by Gerrit Code Review
70 changed files with 5728 additions and 0 deletions


@@ -0,0 +1,126 @@
# Copyright 2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Module contains factories to initialize various applications
of monasca-log-api
"""
import falcon
import six
from monasca_log_api import config
from monasca_log_api.app.base import error_handlers
from monasca_log_api.app.base import request
from monasca_log_api.app.controller import healthchecks
from monasca_log_api.app.controller import versions
from monasca_log_api.app.controller.v2 import logs as v2_logs
from monasca_log_api.app.controller.v3 import logs as v3_logs
from oslo_log import log
def error_trap(app_name):
"""Decorator trapping any error during application boot time"""
@six.wraps(error_trap)
def _wrapper(func):
        @six.wraps(func)
def _inner_wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except Exception:
logger = log.getLogger(__name__)
logger.exception('Failed to load application \'%s\'', app_name)
raise
return _inner_wrapper
return _wrapper
def singleton_config(func):
"""Decorator ensuring that configuration is loaded only once."""
    @six.wraps(func)
def _wrapper(global_config, **local_conf):
config.parse_args()
return func(global_config, **local_conf)
return _wrapper
@error_trap('version')
def create_version_app(global_conf, **local_conf):
"""Creates Version application"""
ctrl = versions.Versions()
controllers = {
'/': ctrl, # redirect http://host:port/ down to Version app
# avoid conflicts with actual pipelines and 404 error
'/version': ctrl, # list all the versions
'/version/{version_id}': ctrl # display details of the version
}
wsgi_app = falcon.API(
request_type=request.Request
)
for route, ctrl in controllers.items():
wsgi_app.add_route(route, ctrl)
return wsgi_app
@error_trap('healthcheck')
def create_healthcheck_app(global_conf, **local_conf):
"""Creates Healthcheck application"""
ctrl = healthchecks.HealthChecks()
controllers = {
'/': ctrl
}
wsgi_app = falcon.API(
request_type=request.Request
)
for route, ctrl in controllers.items():
wsgi_app.add_route(route, ctrl)
return wsgi_app
@error_trap('api')
@singleton_config
def create_api_app(global_conf, **local_conf):
"""Creates MainAPI application"""
controllers = {}
api_version = global_conf.get('api_version')
if api_version == 'v2.0':
controllers.update({
'/log/single': v2_logs.Logs()
})
elif api_version == 'v3.0':
controllers.update({
'/logs': v3_logs.Logs()
})
wsgi_app = falcon.API(
request_type=request.Request
)
for route, ctrl in controllers.items():
wsgi_app.add_route(route, ctrl)
error_handlers.register_error_handlers(wsgi_app)
return wsgi_app
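
Each factory above follows the same pattern: build a dict mapping route templates to resource objects, then register every pair on a falcon.API instance. A minimal, standalone sketch of that pattern (illustrative only, not part of this change; it only assumes falcon is installed):

import falcon


class ToyVersions(object):
    """Stand-in for versions.Versions, for illustration only."""

    def on_get(self, req, resp, version_id=None):
        resp.media = {'version_id': version_id or 'all'}


ctrl = ToyVersions()
controllers = {
    '/': ctrl,
    '/version': ctrl,
    '/version/{version_id}': ctrl,
}

application = falcon.API()
for route, resource in controllers.items():
    application.add_route(route, resource)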


@@ -0,0 +1,28 @@
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import falcon
from monasca_log_api.app.base import model
def log_envelope_exception_handler(ex, req, resp, params):
raise falcon.HTTPUnprocessableEntity(
title='Failed to create Envelope',
        description=str(ex))
def register_error_handlers(app):
app.add_error_handler(model.LogEnvelopeException,
log_envelope_exception_handler)


@@ -0,0 +1,38 @@
# Copyright 2015 kornicameister@gmail.com
# Copyright 2015 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import falcon
HTTP_422 = '422 Unprocessable Entity'
class HTTPUnprocessableEntity(falcon.OptionalRepresentation, falcon.HTTPError):
"""HTTPUnprocessableEntity http error.
HTTPError that comes with '422 Unprocessable Entity' status
:argument: message(str) - meaningful description of what caused an error
:argument: kwargs - any other option defined in
:py:class:`falcon.OptionalRepresentation` and
:py:class:`falcon.HTTPError`
"""
def __init__(self, message, **kwargs):
falcon.HTTPError.__init__(self,
HTTP_422,
'unprocessable_entity',
message,
**kwargs
)


@@ -0,0 +1,252 @@
# Copyright 2015 kornicameister@gmail.com
# Copyright 2016-2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
import falcon
from monasca_common.kafka import producer
from monasca_common.rest import utils as rest_utils
from monasca_log_api import conf
from monasca_log_api.app.base import model
from monasca_log_api.monitoring import client
from monasca_log_api.monitoring import metrics
from oslo_log import log
from oslo_utils import encodeutils
LOG = log.getLogger(__name__)
CONF = conf.CONF
_RETRY_AFTER = 60
_TIMESTAMP_KEY_SIZE = len(
bytearray(str(int(time.time() * 1000)).encode('utf-8')))
_TRUNCATED_PROPERTY_SIZE = len(
bytearray('"truncated": true'.encode('utf-8')))
_KAFKA_META_DATA_SIZE = 32
_TRUNCATION_SAFE_OFFSET = 1
class InvalidMessageException(Exception):
pass
class LogPublisher(object):
"""Publishes log data to Kafka
    LogPublisher is able to send a single message to multiple configured topics.
    It uses the following configuration written in the conf file::
[log_publisher]
topics = 'logs'
kafka_url = 'localhost:8900'
Note:
Uses :py:class:`monasca_common.kafka.producer.KafkaProducer`
to ship logs to kafka. For more details
see `monasca_common`_ github repository.
.. _monasca_common: https://github.com/openstack/monasca-common
"""
def __init__(self):
self._topics = CONF.log_publisher.topics
self.max_message_size = CONF.log_publisher.max_message_size
self._kafka_publisher = producer.KafkaProducer(
url=CONF.log_publisher.kafka_url
)
if CONF.monitoring.enable:
self._statsd = client.get_client()
# setup counter, gauges etc
self._logs_published_counter = self._statsd.get_counter(
metrics.LOGS_PUBLISHED_METRIC
)
self._publish_time_ms = self._statsd.get_timer(
metrics.LOGS_PUBLISH_TIME_METRIC
)
self._logs_lost_counter = self._statsd.get_counter(
metrics.LOGS_PUBLISHED_LOST_METRIC
)
self._logs_truncated_gauge = self._statsd.get_gauge(
metrics.LOGS_TRUNCATED_METRIC
)
LOG.info('Initializing LogPublisher <%s>', self)
def send_message(self, messages):
"""Sends message to each configured topic.
Note:
Falsy messages (i.e. empty) are not shipped to kafka
See also
* :py:class:`monasca_log_api.common.model.Envelope`
* :py:meth:`._is_message_valid`
:param dict|list messages: instance (or instances) of log envelope
"""
if not messages:
return
if not isinstance(messages, list):
messages = [messages]
sent_counter = 0
num_of_msgs = len(messages)
LOG.debug('About to publish %d messages to %s topics',
num_of_msgs, self._topics)
try:
send_messages = []
for message in messages:
msg = self._transform_message(message)
send_messages.append(msg)
if CONF.monitoring.enable:
with self._publish_time_ms.time(name=None):
self._publish(send_messages)
else:
self._publish(send_messages)
sent_counter = len(send_messages)
except Exception as ex:
LOG.exception('Failure in publishing messages to kafka')
raise ex
finally:
self._after_publish(sent_counter, num_of_msgs)
def _transform_message(self, message):
"""Transforms message into JSON.
        Method executes the transformation for a
        single element. The transformation consists of the
        following operations:
* checking if message is valid
(:py:func:`.LogPublisher._is_message_valid`)
* truncating message if necessary
(:py:func:`.LogPublisher._truncate`)
:param model.Envelope message: instance of message
:return: serialized message
:rtype: str
"""
if not self._is_message_valid(message):
raise InvalidMessageException()
truncated = self._truncate(message)
return encodeutils.safe_encode(truncated, incoming='utf-8')
def _truncate(self, envelope):
"""Truncates the message if needed.
        Each message sent to kafka is verified.
        Method checks if the message serialized to json
        exceeds the maximum allowed size that can be posted to the kafka
        queue. If so, the method truncates the message property of the log
        by the difference between the message size and the allowed size.
:param Envelope envelope: original envelope
:return: serialized message
:rtype: str
"""
msg_str = model.serialize_envelope(envelope)
envelope_size = ((len(bytearray(msg_str, 'utf-8', 'replace')) +
_TIMESTAMP_KEY_SIZE +
_KAFKA_META_DATA_SIZE)
if msg_str is not None else -1)
diff_size = ((envelope_size - self.max_message_size) +
_TRUNCATION_SAFE_OFFSET)
if diff_size > 1:
truncated_by = diff_size + _TRUNCATED_PROPERTY_SIZE
            LOG.warning(('Detected message that exceeds %d bytes, '
                         'message will be truncated by %d bytes'),
self.max_message_size,
truncated_by)
log_msg = envelope['log']['message']
truncated_log_msg = log_msg[:-truncated_by]
envelope['log']['truncated'] = True
envelope['log']['message'] = truncated_log_msg
if CONF.monitoring.enable:
self._logs_truncated_gauge.send(name=None, value=truncated_by)
msg_str = rest_utils.as_json(envelope)
else:
if CONF.monitoring.enable:
self._logs_truncated_gauge.send(name=None, value=0)
return msg_str
def _publish(self, messages):
"""Publishes messages to kafka.
:param list messages: list of messages
"""
num_of_msg = len(messages)
LOG.debug('Publishing %d messages', num_of_msg)
try:
for topic in self._topics:
self._kafka_publisher.publish(
topic,
messages
)
LOG.debug('Sent %d messages to topic %s', num_of_msg, topic)
except Exception as ex:
raise falcon.HTTPServiceUnavailable('Service unavailable',
str(ex), 60)
@staticmethod
def _is_message_valid(message):
"""Validates message before sending.
Methods checks if message is :py:class:`model.Envelope`.
By being instance of this class it is ensured that all required
keys are found and they will have their values.
"""
return message and isinstance(message, model.Envelope)
def _after_publish(self, send_count, to_send_count):
"""Executed after publishing to sent metrics.
:param int send_count: how many messages have been sent
:param int to_send_count: how many messages should be sent
"""
failed_to_send = to_send_count - send_count
if failed_to_send == 0:
LOG.debug('Successfully published all [%d] messages',
send_count)
else:
error_str = ('Failed to send all messages, %d '
'messages out of %d have not been published')
LOG.error(error_str, failed_to_send, to_send_count)
if CONF.monitoring.enable:
self._logs_published_counter.increment(value=send_count)
self._logs_lost_counter.increment(value=failed_to_send)
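
The byte accounting in _truncate is easier to follow with concrete numbers. A standalone worked example (constants copied from the module above; the 1048576-byte limit is the default of the [log_publisher] max_message_size option defined later in this change, and the envelope size is a hypothetical value):

import time

_TIMESTAMP_KEY_SIZE = len(
    bytearray(str(int(time.time() * 1000)).encode('utf-8')))  # 13 bytes today
_TRUNCATED_PROPERTY_SIZE = len(
    bytearray('"truncated": true'.encode('utf-8')))           # 17 bytes
_KAFKA_META_DATA_SIZE = 32
_TRUNCATION_SAFE_OFFSET = 1

max_message_size = 1048576           # default [log_publisher] max_message_size
serialized_envelope_size = 1050000   # hypothetical oversized envelope, in bytes

envelope_size = (serialized_envelope_size +
                 _TIMESTAMP_KEY_SIZE +
                 _KAFKA_META_DATA_SIZE)                                    # 1050045
diff_size = (envelope_size - max_message_size) + _TRUNCATION_SAFE_OFFSET   # 1470
truncated_by = diff_size + _TRUNCATED_PROPERTY_SIZE                        # 1487

# The last 1487 characters of envelope['log']['message'] would be dropped
# and the envelope marked with 'truncated': True before re-serialization.
print(truncated_by)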


@@ -0,0 +1,119 @@
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import timeutils
import six
from monasca_common.rest import utils as rest_utils
def serialize_envelope(envelope):
"""Returns json representation of an envelope.
:return: json object of envelope
:rtype: six.text_type
"""
json = rest_utils.as_json(envelope, ensure_ascii=False)
if six.PY2:
raw = six.text_type(json.replace(r'\\', r'\\\\'), encoding='utf-8',
errors='replace')
else:
raw = json
return raw
class LogEnvelopeException(Exception):
pass
class Envelope(dict):
def __init__(self, log, meta):
if not log:
error_msg = 'Envelope cannot be created without log'
raise LogEnvelopeException(error_msg)
if 'tenantId' not in meta or not meta.get('tenantId'):
error_msg = 'Envelope cannot be created without tenant'
raise LogEnvelopeException(error_msg)
creation_time = self._get_creation_time()
super(Envelope, self).__init__(
log=log,
creation_time=creation_time,
meta=meta
)
@staticmethod
def _get_creation_time():
return timeutils.utcnow_ts()
@classmethod
def new_envelope(cls, log, tenant_id, region, dimensions=None):
"""Creates new log envelope
        Log envelope is combined out of the following properties:
* log - dict
* creation_time - timestamp
* meta - meta block
        Example output json would look like this:
.. code-block:: json
{
"log": {
"message": "Some message",
"dimensions": {
"hostname": "devstack"
}
},
"creation_time": 1447834886,
"meta": {
"tenantId": "e4bd29509eda473092d32aadfee3e7b1",
"region": "pl"
}
}
        :param dict log: original log element (containing message and other
            params)
:param str tenant_id: tenant id to be put in meta field
:param str region: region to be put in meta field
:param dict dimensions: additional dimensions to be appended to log
object dimensions
"""
if dimensions:
log['dimensions'].update(dimensions)
log_meta = {
'region': region,
'tenantId': tenant_id
}
return cls(log, log_meta)
@property
def log(self):
return self.get('log', None)
@property
def creation_time(self):
return self.get('creation_time', None)
@property
def meta(self):
return self.get('meta', None)
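
A hedged usage sketch for the model above (it assumes monasca-log-api is installed and importable, and mirrors the example JSON in the new_envelope docstring):

from monasca_log_api.app.base import model

log = {'message': 'Some message', 'dimensions': {'hostname': 'devstack'}}
envelope = model.Envelope.new_envelope(
    log=log,
    tenant_id='e4bd29509eda473092d32aadfee3e7b1',
    region='pl',
)

# The envelope is a plain dict with three keys: log, creation_time and meta.
assert envelope.meta == {'region': 'pl',
                         'tenantId': 'e4bd29509eda473092d32aadfee3e7b1'}
assert isinstance(envelope.creation_time, int)  # unix timestamp in seconds
print(model.serialize_envelope(envelope))       # JSON text, ready for kafka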


@@ -0,0 +1,109 @@
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import falcon
from monasca_common.policy import policy_engine as policy
from monasca_log_api import policies
from monasca_log_api.app.base import request_context
from monasca_log_api.app.base import validation
policy.POLICIES = policies
_TENANT_ID_PARAM = 'tenant_id'
"""Name of the query-param pointing at project-id (tenant-id)"""
class Request(falcon.Request):
"""Variation of falcon.Request with context
Following class enhances :py:class:`falcon.Request` with
:py:class:`context.RequestContext`.
"""
def __init__(self, env, options=None):
super(Request, self).__init__(env, options)
self.context = request_context.RequestContext.from_environ(self.env)
def validate(self, content_types):
"""Performs common request validation
Validation checklist (in that order):
* :py:func:`validation.validate_content_type`
* :py:func:`validation.validate_payload_size`
* :py:func:`validation.validate_cross_tenant`
:param content_types: allowed content-types handler supports
:type content_types: list
:raises Exception: if any of the validation fails
"""
validation.validate_content_type(self, content_types)
validation.validate_payload_size(self)
validation.validate_cross_tenant(
tenant_id=self.project_id,
roles=self.roles,
cross_tenant_id=self.cross_project_id
)
@property
def project_id(self):
"""Returns project-id (tenant-id)
:return: project-id
:rtype: str
"""
return self.context.project_id
@property
def cross_project_id(self):
"""Returns project-id (tenant-id) found in query params.
This particular project-id is later on identified as
cross-project-id
:return: project-id
:rtype: str
"""
return self.get_param(_TENANT_ID_PARAM, required=False)
@property
def user_id(self):
"""Returns user-id
:return: user-id
:rtype: str
"""
return self.context.user
@property
def roles(self):
"""Returns roles associated with user
:return: user's roles
:rtype: list
"""
return self.context.roles
def can(self, action, target=None):
return self.context.can(action, target)
def __repr__(self):
return '%s, context=%s' % (self.path, self.context)


@@ -0,0 +1,36 @@
# Copyright 2017 FUJITSU LIMITED
# Copyright 2018 OP5 AB
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from monasca_common.policy import policy_engine as policy
from monasca_log_api import policies
from oslo_context import context
policy.POLICIES = policies
class RequestContext(context.RequestContext):
"""RequestContext.
    RequestContext is a customized version of
    :py:class:`oslo_context.context.RequestContext`.
"""
def can(self, action, target=None):
if target is None:
target = {'project_id': self.project_id,
'user_id': self.user_id}
return policy.authorize(self, action=action, target=target)


@@ -0,0 +1,267 @@
# Copyright 2016-2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
import falcon
import six
from monasca_log_api import conf
from monasca_log_api.app.base import exceptions
from oslo_log import log
LOG = log.getLogger(__name__)
CONF = conf.CONF
APPLICATION_TYPE_CONSTRAINTS = {
'MAX_LENGTH': 255,
'PATTERN': re.compile('^[a-zA-Z0-9_.\\-]+$')
}
"""Application type constraint used in validation.
See :py:func:`Validations.validate_application_type`
"""
DIMENSION_NAME_CONSTRAINTS = {
'MAX_LENGTH': 255,
'PATTERN': re.compile('[^><={}(), \'";&]+$')
}
"""Constraint for name of single dimension.
See :py:func:`Validations.validate_dimensions`
"""
DIMENSION_VALUE_CONSTRAINTS = {
'MAX_LENGTH': 255
}
"""Constraint for value of single dimension.
See :py:func:`Validations.validate_dimensions`
"""
def validate_application_type(application_type=None):
"""Validates application type.
Validation won't take place if application_type is None.
For details see: :py:data:`APPLICATION_TYPE_CONSTRAINTS`
:param str application_type: application type
"""
def validate_length():
if (len(application_type) >
APPLICATION_TYPE_CONSTRAINTS['MAX_LENGTH']):
msg = ('Application type {type} must be '
'{length} characters or less')
raise exceptions.HTTPUnprocessableEntity(
msg.format(
type=application_type,
length=APPLICATION_TYPE_CONSTRAINTS[
'MAX_LENGTH']
)
)
def validate_match():
if (not APPLICATION_TYPE_CONSTRAINTS['PATTERN']
.match(application_type)):
raise exceptions.HTTPUnprocessableEntity(
'Application type %s may only contain: "a-z A-Z 0-9 _ - ."'
% application_type
)
if application_type:
validate_length()
validate_match()
def _validate_dimension_name(name):
try:
if len(name) > DIMENSION_NAME_CONSTRAINTS['MAX_LENGTH']:
raise exceptions.HTTPUnprocessableEntity(
'Dimension name %s must be 255 characters or less' %
name
)
if name[0] == '_':
raise exceptions.HTTPUnprocessableEntity(
'Dimension name %s cannot start with underscore (_)' %
name
)
if not DIMENSION_NAME_CONSTRAINTS['PATTERN'].match(name):
raise exceptions.HTTPUnprocessableEntity(
'Dimension name %s may not contain: %s' %
(name, '> < = { } ( ) \' " , ; &')
)
except (TypeError, IndexError):
raise exceptions.HTTPUnprocessableEntity(
'Dimension name cannot be empty'
)
def _validate_dimension_value(value):
try:
value[0]
if len(value) > DIMENSION_VALUE_CONSTRAINTS['MAX_LENGTH']:
raise exceptions.HTTPUnprocessableEntity(
'Dimension value %s must be 255 characters or less' %
value
)
except (TypeError, IndexError):
raise exceptions.HTTPUnprocessableEntity(
'Dimension value cannot be empty'
)
def validate_dimensions(dimensions):
"""Validates dimensions type.
Empty dimensions are not being validated.
    For details see:
    * :py:data:`DIMENSION_NAME_CONSTRAINTS`
    * :py:data:`DIMENSION_VALUE_CONSTRAINTS`
    :param dict dimensions: dimensions to validate
"""
try:
for dim_name, dim_value in dimensions.items():
_validate_dimension_name(dim_name)
_validate_dimension_value(dim_value)
except AttributeError:
raise exceptions.HTTPUnprocessableEntity(
'Dimensions %s must be a dictionary (map)' % dimensions)
def validate_content_type(req, allowed):
"""Validates content type.
Method validates request against correct
content type.
If content-type cannot be established (i.e. header is missing),
:py:class:`falcon.HTTPMissingHeader` is thrown.
If content-type is not **application/json** or **text/plain**,
:py:class:`falcon.HTTPUnsupportedMediaType` is thrown.
:param falcon.Request req: current request
:param iterable allowed: allowed content type
:exception: :py:class:`falcon.HTTPMissingHeader`
:exception: :py:class:`falcon.HTTPUnsupportedMediaType`
"""
content_type = req.content_type
LOG.debug('Content-Type is %s', content_type)
if content_type is None or len(content_type) == 0:
raise falcon.HTTPMissingHeader('Content-Type')
if content_type not in allowed:
sup_types = ', '.join(allowed)
details = ('Only [%s] are accepted as logs representations'
% str(sup_types))
raise falcon.HTTPUnsupportedMediaType(description=details)
def validate_payload_size(req):
"""Validates payload size.
Method validates sent payload size.
It expects that http header **Content-Length** is present.
    If it is not, the method raises :py:class:`falcon.HTTPLengthRequired`.
    Otherwise the value is compared with::
[service]
max_log_size = 1048576
**max_log_size** refers to the maximum allowed content length.
If it is exceeded :py:class:`falcon.HTTPRequestEntityTooLarge` is
thrown.
:param falcon.Request req: current request
:exception: :py:class:`falcon.HTTPLengthRequired`
:exception: :py:class:`falcon.HTTPRequestEntityTooLarge`
"""
payload_size = req.content_length
max_size = CONF.service.max_log_size
LOG.debug('Payload (content-length) is %s', str(payload_size))
if payload_size is None:
raise falcon.HTTPLengthRequired(
title='Content length header is missing',
description='Content length is required to estimate if '
'payload can be processed'
)
if payload_size >= max_size:
raise falcon.HTTPRequestEntityTooLarge(
title='Log payload size exceeded',
description='Maximum allowed size is %d bytes' % max_size
)
def validate_is_delegate(roles):
delegate_roles = CONF.roles_middleware.delegate_roles
if roles and delegate_roles:
roles = roles.split(',') if isinstance(roles, six.string_types) \
else roles
return any(x in set(delegate_roles) for x in roles)
return False
def validate_cross_tenant(tenant_id, cross_tenant_id, roles):
if not validate_is_delegate(roles):
if cross_tenant_id:
raise falcon.HTTPForbidden(
'Permission denied',
                'Project %s cannot POST cross tenant logs' % tenant_id
)
def validate_log_message(log_object):
"""Validates log property.
Log property should have message property.
Args:
log_object (dict): log property
"""
if 'message' not in log_object:
raise exceptions.HTTPUnprocessableEntity(
'Log property should have message'
)
def validate_authorization(http_request, authorized_rules_list):
"""Validates whether is authorized according to provided policy rules list.
If authorization fails, 401 is thrown with appropriate description.
Additionally response specifies 'WWW-Authenticate' header with 'Token'
value challenging the client to use different token (the one with
different set of roles which can access the service).
"""
challenge = 'Token'
for rule in authorized_rules_list:
try:
http_request.can(rule)
return
except Exception as ex:
LOG.debug(ex)
raise falcon.HTTPUnauthorized('Forbidden',
'The request does not have access to this service',
challenge)
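
The dimension-name rule above is easiest to see in isolation. A standalone sketch reproducing just the pattern from DIMENSION_NAME_CONSTRAINTS (illustrative only):

import re

# Same regex as DIMENSION_NAME_CONSTRAINTS['PATTERN']: the negated character
# class must run to the end of the string, and re.match anchors at the start,
# so any forbidden character (> < = { } ( ) ' " , ; & or a space) anywhere in
# the name makes the match fail.
name_pattern = re.compile('[^><={}(), \'";&]+$')

print(bool(name_pattern.match('hostname')))       # True  - accepted
print(bool(name_pattern.match('region.zone-1')))  # True  - dots and dashes pass
print(bool(name_pattern.match('bad;name')))       # False - ';' is rejected
print(bool(name_pattern.match('no spaces')))      # False - ' ' is rejected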


@@ -0,0 +1,24 @@
# Copyright 2015 kornicameister@gmail.com
# Copyright 2015 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
LogApiHeader = collections.namedtuple('LogApiHeader', ['name', 'is_required'])
"""Tuple describing a header."""
X_TENANT_ID = LogApiHeader(name='X-Tenant-Id', is_required=False)
X_ROLES = LogApiHeader(name='X-Roles', is_required=False)
X_APPLICATION_TYPE = LogApiHeader(name='X-Application-Type', is_required=False)
X_DIMENSIONS = LogApiHeader(name='X_Dimensions', is_required=False)


@@ -0,0 +1,60 @@
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import falcon
from oslo_log import log
LOG = log.getLogger(__name__)
HealthCheckResult = collections.namedtuple('HealthCheckResult',
['status', 'details'])
# TODO(feature) monasca-common candidate
class HealthChecksApi(object):
"""HealthChecks Api
    HealthChecksApi serves information regarding the health of the API.
"""
def __init__(self):
super(HealthChecksApi, self).__init__()
LOG.info('Initializing HealthChecksApi!')
def on_get(self, req, res):
"""Complex healthcheck report on GET.
Returns complex report regarding API well being
and all dependent services.
:param falcon.Request req: current request
:param falcon.Response res: current response
"""
res.status = falcon.HTTP_501
def on_head(self, req, res):
"""Simple healthcheck report on HEAD.
        In contrast to :py:meth:`.HealthChecksApi.on_get`, this
        method is supposed to execute as fast as possible to inform the
        user that the API is up and running.
:param falcon.Request req: current request
:param falcon.Response res: current response
"""
res.status = falcon.HTTP_501


@@ -0,0 +1,88 @@
# Copyright 2015 kornicameister@gmail.com
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import falcon
from monasca_log_api import conf
from monasca_log_api.monitoring import client
from monasca_log_api.monitoring import metrics
from oslo_log import log
CONF = conf.CONF
LOG = log.getLogger(__name__)
class LogsApi(object):
"""Logs API.
    Logs API acts as a RESTful endpoint accepting
    messages containing collected log entries from the system.
    It works as a gateway for any further processing of accepted data.
"""
def __init__(self):
super(LogsApi, self).__init__()
if CONF.monitoring.enable:
self._statsd = client.get_client()
# create_common counters, gauges etc.
self._metrics_dimensions = dimensions = {'version': self.version}
self._logs_in_counter = self._statsd.get_counter(
name=metrics.LOGS_RECEIVED_METRIC,
dimensions=dimensions
)
self._logs_size_gauge = self._statsd.get_gauge(
name=metrics.LOGS_RECEIVED_BYTE_SIZE_METRICS,
dimensions=dimensions
)
self._logs_rejected_counter = self._statsd.get_counter(
name=metrics.LOGS_REJECTED_METRIC,
dimensions=dimensions
)
self._logs_processing_time = self._statsd.get_timer(
name=metrics.LOGS_PROCESSING_TIME_METRIC,
dimensions=dimensions
)
LOG.info('Initializing LogsApi %s!' % self.version)
def on_post(self, req, res):
"""Accepts sent logs as text or json.
        Accepts logs sent to this resource, which should
        be sent to the kafka queue.
:param req: current request
:param res: current response
"""
res.status = falcon.HTTP_501 # pragma: no cover
def on_get(self, req, res):
"""Queries logs matching specified dimension values.
Performs queries on the underlying log storage
against a time range and set of dimension values.
:param req: current request
:param res: current response
"""
res.status = falcon.HTTP_501 # pragma: no cover
@property
def version(self):
return getattr(self, 'VERSION')

View File

@@ -0,0 +1,34 @@
# Copyright 2015 kornicameister@gmail.com
# Copyright 2015 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import falcon
from oslo_log import log
LOG = log.getLogger(__name__)
class VersionsAPI(object):
"""Versions API
VersionsAPI returns information about API itself.
"""
def __init__(self):
super(VersionsAPI, self).__init__()
LOG.info('Initializing VersionsAPI!')
def on_get(self, req, res, version_id):
res.status = falcon.HTTP_501


@@ -0,0 +1,60 @@
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import falcon
from monasca_common.rest import utils as rest_utils
from monasca_log_api.app.base.validation import validate_authorization
from monasca_log_api.app.controller.api import healthcheck_api
from monasca_log_api.healthcheck import kafka_check
class HealthChecks(healthcheck_api.HealthChecksApi):
# response configuration
CACHE_CONTROL = ['must-revalidate', 'no-cache', 'no-store']
# response codes
HEALTHY_CODE_GET = falcon.HTTP_OK
HEALTHY_CODE_HEAD = falcon.HTTP_NO_CONTENT
NOT_HEALTHY_CODE = falcon.HTTP_SERVICE_UNAVAILABLE
def __init__(self):
self._kafka_check = kafka_check.KafkaHealthCheck()
super(HealthChecks, self).__init__()
def on_head(self, req, res):
validate_authorization(req, ['log_api:healthcheck:head'])
res.status = self.HEALTHY_CODE_HEAD
res.cache_control = self.CACHE_CONTROL
def on_get(self, req, res):
# at this point we know API is alive, so
# keep up good work and verify kafka status
validate_authorization(req, ['log_api:healthcheck:get'])
kafka_result = self._kafka_check.healthcheck()
# in case it'd be unhealthy,
# message will contain error string
status_data = {
'kafka': kafka_result.message
}
# Really simple approach, ideally that should be
# part of monasca-common with some sort of registration of
# healthchecks concept
res.status = (self.HEALTHY_CODE_GET
if kafka_result.healthy else self.NOT_HEALTHY_CODE)
res.cache_control = self.CACHE_CONTROL
res.body = rest_utils.as_json(status_data)
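
A hedged client-side sketch of the healthcheck round trip (not part of this change; it assumes the healthcheck application is mounted at /healthcheck on the development-server port used elsewhere in this change, and that the deployment's auth middleware accepts a token header):

import requests

resp = requests.get(
    'http://127.0.0.1:5607/healthcheck',
    headers={'X-Auth-Token': '<keystone-token>'},  # hypothetical credential
)
print(resp.status_code)               # 200 while kafka is reachable, 503 otherwise
print(resp.headers['Cache-Control'])  # must-revalidate, no-cache, no-store
print(resp.json())                    # {'kafka': '<status message from KafkaHealthCheck>'}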


@@ -0,0 +1,157 @@
# Copyright 2015 kornicameister@gmail.com
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from monasca_common.rest import utils as rest_utils
from monasca_log_api import conf
from monasca_log_api.app.base import exceptions
from monasca_log_api.app.base import model
from monasca_log_api.app.base import validation
from oslo_config import cfg
from oslo_log import log
LOG = log.getLogger(__name__)
CONF = conf.CONF
EPOCH_START = datetime.datetime(1970, 1, 1)
class LogCreator(object):
"""Transforms logs,
Takes care of transforming information received via
HTTP requests into log and log envelopes objects.
For more details see following:
* :py:func:`LogCreator.new_log`
* :py:func:`LogCreator.new_log_envelope`
"""
def __init__(self):
self._log = log.getLogger('service.LogCreator')
self._log.info('Initializing LogCreator')
@staticmethod
def _create_meta_info(tenant_id):
"""Creates meta block for log envelope.
Additionally method accesses oslo configuration,
looking for *service.region* configuration property.
For more details see :py:data:`service_opts`
:param tenant_id: ID of the tenant
:type tenant_id: str
:return: meta block
:rtype: dict
"""
return {
'tenantId': tenant_id,
'region': cfg.CONF.service.region
}
def new_log(self,
application_type,
dimensions,
payload,
content_type='application/json',
validate=True):
"""Creates new log object.
:param str application_type: origin of the log
:param dict dimensions: dictionary of dimensions (any data sent to api)
:param stream payload: stream to read log entry from
:param str content_type: actual content type used to send data to
server
:param bool validate: by default True, marks if log should be validated
:return: log object
:rtype: dict
:keyword: log_object
"""
payload = rest_utils.read_body(payload, content_type)
if not payload:
return None
# normalize_yet_again
application_type = parse_application_type(application_type)
dimensions = parse_dimensions(dimensions)
if validate:
self._log.debug('Validation enabled, proceeding with validation')
validation.validate_application_type(application_type)
validation.validate_dimensions(dimensions)
self._log.debug(
'application_type=%s,dimensions=%s' % (
application_type, dimensions)
)
log_object = {}
if content_type == 'application/json':
log_object.update(payload)
else:
log_object.update({'message': payload})
validation.validate_log_message(log_object)
dimensions['component'] = application_type
log_object.update({'dimensions': dimensions})
return log_object
def new_log_envelope(self, log_object, tenant_id):
return model.Envelope(
log=log_object,
meta=self._create_meta_info(tenant_id)
)
def parse_application_type(app_type):
if app_type:
app_type = app_type.strip()
return app_type if app_type else None
def parse_dimensions(dimensions):
if not dimensions:
        raise exceptions.HTTPUnprocessableEntity('Dimensions are required')
new_dimensions = {}
dimensions = map(str.strip, dimensions.split(','))
for dim in dimensions:
if not dim:
raise exceptions.HTTPUnprocessableEntity(
'Dimension cannot be empty')
elif ':' not in dim:
raise exceptions.HTTPUnprocessableEntity(
'%s is not a valid dimension' % dim)
dim = dim.split(':')
name = str(dim[0].strip()) if dim[0] else None
value = str(dim[1].strip()) if dim[1] else None
if name and value:
new_dimensions.update({name: value})
return new_dimensions
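
The X-Dimensions header consumed by parse_dimensions above is a comma-separated list of name:value pairs. A hedged usage sketch (it assumes monasca-log-api is installed and importable):

from monasca_log_api.app.controller.v2.aid import service

dims = service.parse_dimensions('hostname:devstack, service:monitoring')
print(dims)  # {'hostname': 'devstack', 'service': 'monitoring'}

# Malformed input is rejected with HTTPUnprocessableEntity, for example:
#   service.parse_dimensions('')          # dimensions are required
#   service.parse_dimensions('no-colon')  # 'no-colon is not a valid dimension'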


@@ -0,0 +1,104 @@
# Copyright 2015 kornicameister@gmail.com
# Copyright 2016-2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import falcon
import six
from monasca_log_api import conf
from monasca_log_api.app.base import log_publisher
from monasca_log_api.app.base.validation import validate_authorization
from monasca_log_api.app.controller.api import headers
from monasca_log_api.app.controller.api import logs_api
from monasca_log_api.app.controller.v2.aid import service
CONF = conf.CONF
_DEPRECATED_INFO = ('/v2.0/log/single has been deprecated. '
'Please use /v3.0/logs')
class Logs(logs_api.LogsApi):
"""Logs Api V2."""
VERSION = 'v2.0'
SUPPORTED_CONTENT_TYPES = {'application/json', 'text/plain'}
def __init__(self):
self._log_creator = service.LogCreator()
self._kafka_publisher = log_publisher.LogPublisher()
super(Logs, self).__init__()
@falcon.deprecated(_DEPRECATED_INFO)
def on_post(self, req, res):
validate_authorization(req, ['log_api:logs:post'])
if CONF.monitoring.enable:
with self._logs_processing_time.time(name=None):
self.process_on_post_request(req, res)
else:
self.process_on_post_request(req, res)
def process_on_post_request(self, req, res):
try:
req.validate(self.SUPPORTED_CONTENT_TYPES)
tenant_id = (req.project_id if req.project_id
else req.cross_project_id)
log = self.get_log(request=req)
envelope = self.get_envelope(
log=log,
tenant_id=tenant_id
)
if CONF.monitoring.enable:
self._logs_size_gauge.send(name=None,
value=int(req.content_length))
self._logs_in_counter.increment()
except Exception:
# any validation that failed means
# log is invalid and rejected
if CONF.monitoring.enable:
self._logs_rejected_counter.increment()
raise
self._kafka_publisher.send_message(envelope)
res.status = falcon.HTTP_204
res.add_link(
target=str(_get_v3_link(req)),
rel='current', # [RFC5005]
title='V3 Logs',
type_hint='application/json'
)
res.append_header('DEPRECATED', 'true')
def get_envelope(self, log, tenant_id):
return self._log_creator.new_log_envelope(
log_object=log,
tenant_id=tenant_id
)
def get_log(self, request):
return self._log_creator.new_log(
application_type=request.get_header(*headers.X_APPLICATION_TYPE),
dimensions=request.get_header(*headers.X_DIMENSIONS),
payload=request.stream,
content_type=request.content_type
)
def _get_v3_link(req):
self_uri = req.uri
if six.PY2:
self_uri = self_uri.decode('UTF-8')
base_uri = self_uri.replace(req.relative_uri, '')
return '%s/v3.0/logs' % base_uri
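
A hedged client-side sketch of posting a single log to the deprecated v2 endpoint (not part of this change; the base URL, the /v2.0 mount prefix and the token value are illustrative assumptions, since the actual mounting is defined by the paste pipeline, which is not shown in this hunk):

import requests

resp = requests.post(
    'http://127.0.0.1:5607/v2.0/log/single',
    headers={
        'Content-Type': 'text/plain',
        'X-Auth-Token': '<keystone-token>',            # hypothetical credential
        'X-Application-Type': 'apache',
        'X-Dimensions': 'hostname:devstack,service:web',
    },
    data='10.0.0.1 - - "GET / HTTP/1.1" 200',          # sample log line
)
print(resp.status_code)                # 204 on success
print(resp.headers.get('DEPRECATED'))  # 'true' - clients should move to v3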


@@ -0,0 +1,157 @@
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from monasca_log_api import conf
from monasca_log_api.app.base import log_publisher
from monasca_log_api.app.base import model
from monasca_log_api.app.base import validation
from oslo_log import log
LOG = log.getLogger(__name__)
CONF = conf.CONF
class BulkProcessor(log_publisher.LogPublisher):
"""BulkProcessor for effective log processing and publishing.
    BulkProcessor is a customized version of
    :py:class:`monasca_log_api.app.base.log_publisher.LogPublisher`
    that processes a bulk request inside a single loop.
"""
def __init__(self, logs_in_counter=None, logs_rejected_counter=None):
"""Initializes BulkProcessor.
:param logs_in_counter: V3 received logs counter
:param logs_rejected_counter: V3 rejected logs counter
"""
super(BulkProcessor, self).__init__()
if CONF.monitoring.enable:
assert logs_in_counter is not None
assert logs_rejected_counter is not None
self._logs_in_counter = logs_in_counter
self._logs_rejected_counter = logs_rejected_counter
self.service_region = CONF.service.region
def send_message(self, logs, global_dimensions=None, log_tenant_id=None):
"""Sends bulk package to kafka
:param list logs: received logs
:param dict global_dimensions: global dimensions for each log
:param str log_tenant_id: tenant who sent logs
"""
num_of_msgs = len(logs) if logs else 0
sent_count = 0
to_send_msgs = []
LOG.debug('Bulk package <logs=%d, dimensions=%s, tenant_id=%s>',
num_of_msgs, global_dimensions, log_tenant_id)
try:
for log_el in logs:
t_el = self._transform_message(log_el,
global_dimensions,
log_tenant_id)
if t_el:
to_send_msgs.append(t_el)
if CONF.monitoring.enable:
with self._publish_time_ms.time(name=None):
self._publish(to_send_msgs)
else:
self._publish(to_send_msgs)
sent_count = len(to_send_msgs)
except Exception as ex:
LOG.error('Failed to send bulk package <logs=%d, dimensions=%s>',
num_of_msgs, global_dimensions)
LOG.exception(ex)
raise ex
finally:
if CONF.monitoring.enable:
self._update_counters(len(to_send_msgs), num_of_msgs)
self._after_publish(sent_count, len(to_send_msgs))
def _update_counters(self, in_counter, to_send_counter):
rejected_counter = to_send_counter - in_counter
self._logs_in_counter.increment(value=in_counter)
self._logs_rejected_counter.increment(value=rejected_counter)
def _transform_message(self, log_element, *args):
try:
validation.validate_log_message(log_element)
log_envelope = model.Envelope.new_envelope(
log=log_element,
tenant_id=args[1],
region=self.service_region,
dimensions=self._get_dimensions(log_element,
global_dims=args[0])
)
msg_payload = (super(BulkProcessor, self)
._transform_message(log_envelope))
return msg_payload
except Exception as ex:
LOG.error('Log transformation failed, rejecting log')
LOG.exception(ex)
return None
def _create_envelope(self, log_element, tenant_id, dimensions=None):
"""Create a log envelope.
:param dict log_element: raw log element
:param str tenant_id: tenant who sent logs
:param dict dimensions: log dimensions
:return: log envelope
:rtype: model.Envelope
"""
return
def _get_dimensions(self, log_element, global_dims=None):
"""Get the dimensions of log element.
If global dimensions are specified and passed to this method,
both instances are merged with each other.
If neither is specified empty dictionary is returned.
If only local dimensions are specified they are returned without any
additional operations. The last statement applies also
to global dimensions.
:param dict log_element: raw log instance
:param dict global_dims: global dimensions or None
:return: local dimensions merged with global dimensions
:rtype: dict
"""
local_dims = log_element.get('dimensions', {})
if not global_dims:
global_dims = {}
if local_dims:
validation.validate_dimensions(local_dims)
dimensions = global_dims.copy()
dimensions.update(local_dims)
return dimensions


@@ -0,0 +1,62 @@
# Copyright 2014 Hewlett-Packard
# Copyright 2015 Cray Inc. All Rights Reserved.
# Copyright 2016 Hewlett Packard Enterprise Development Company LP
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import falcon
from monasca_common.rest import utils as rest_utils
from monasca_log_api.app.base import exceptions
from monasca_log_api.app.base import validation
from oslo_log import log
LOG = log.getLogger(__name__)
def read_json_msg_body(req):
"""Read the json_msg from the http request body and return them as JSON.
:param req: HTTP request object.
:return: Returns the metrics as a JSON object.
:raises falcon.HTTPBadRequest:
"""
try:
msg = req.stream.read()
json_msg = rest_utils.from_json(msg)
return json_msg
except rest_utils.exceptions.DataConversionException as ex:
LOG.debug(ex)
raise falcon.HTTPBadRequest('Bad request',
'Request body is not valid JSON')
except ValueError as ex:
LOG.debug(ex)
raise falcon.HTTPBadRequest('Bad request',
'Request body is not valid JSON')
def get_global_dimensions(request_body):
"""Get the top level dimensions in the HTTP request body."""
global_dims = request_body.get('dimensions', {})
validation.validate_dimensions(global_dims)
return global_dims
def get_logs(request_body):
"""Get the logs in the HTTP request body."""
if 'logs' not in request_body:
raise exceptions.HTTPUnprocessableEntity(
'Unprocessable Entity Logs not found')
return request_body['logs']


@@ -0,0 +1,110 @@
# Copyright 2016 Hewlett Packard Enterprise Development Company, L.P.
# Copyright 2016-2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import falcon
from monasca_log_api import conf
from monasca_log_api.app.base import exceptions
from monasca_log_api.app.base import validation
from monasca_log_api.app.controller.api import logs_api
from monasca_log_api.app.controller.v3.aid import bulk_processor
from monasca_log_api.app.controller.v3.aid import helpers
from monasca_log_api.monitoring import metrics
from oslo_log import log
CONF = conf.CONF
LOG = log.getLogger(__name__)
class Logs(logs_api.LogsApi):
VERSION = 'v3.0'
SUPPORTED_CONTENT_TYPES = {'application/json'}
def __init__(self):
super(Logs, self).__init__()
if CONF.monitoring.enable:
self._processor = bulk_processor.BulkProcessor(
logs_in_counter=self._logs_in_counter,
logs_rejected_counter=self._logs_rejected_counter
)
self._bulks_rejected_counter = self._statsd.get_counter(
name=metrics.LOGS_BULKS_REJECTED_METRIC,
dimensions=self._metrics_dimensions
)
else:
self._processor = bulk_processor.BulkProcessor()
def on_post(self, req, res):
validation.validate_authorization(req, ['log_api:logs:post'])
if CONF.monitoring.enable:
with self._logs_processing_time.time(name=None):
self.process_on_post_request(req, res)
else:
self.process_on_post_request(req, res)
def process_on_post_request(self, req, res):
try:
req.validate(self.SUPPORTED_CONTENT_TYPES)
request_body = helpers.read_json_msg_body(req)
log_list = self._get_logs(request_body)
global_dimensions = self._get_global_dimensions(request_body)
except Exception as ex:
LOG.error('Entire bulk package has been rejected')
LOG.exception(ex)
if CONF.monitoring.enable:
self._bulks_rejected_counter.increment(value=1)
raise ex
if CONF.monitoring.enable:
self._bulks_rejected_counter.increment(value=0)
self._logs_size_gauge.send(name=None,
value=int(req.content_length))
tenant_id = (req.cross_project_id if req.cross_project_id
else req.project_id)
try:
self._processor.send_message(
logs=log_list,
global_dimensions=global_dimensions,
log_tenant_id=tenant_id
)
except Exception as ex:
res.status = getattr(ex, 'status', falcon.HTTP_500)
return
res.status = falcon.HTTP_204
@staticmethod
def _get_global_dimensions(request_body):
"""Get the top level dimensions in the HTTP request body."""
global_dims = request_body.get('dimensions', {})
validation.validate_dimensions(global_dims)
return global_dims
@staticmethod
def _get_logs(request_body):
"""Get the logs in the HTTP request body."""
if 'logs' not in request_body:
raise exceptions.HTTPUnprocessableEntity(
'Unprocessable Entity Logs not found')
return request_body['logs']
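
A hedged client-side sketch of the bulk format accepted above (not part of this change; base URL, mount prefix and token are illustrative assumptions). The body carries optional top-level dimensions that get merged into each log element's own dimensions:

import requests

body = {
    'dimensions': {'hostname': 'devstack'},    # global dimensions
    'logs': [
        {'message': 'booting instance',
         'dimensions': {'service': 'nova'}},   # merged with the global ones
        {'message': 'image uploaded'},
    ],
}

resp = requests.post(
    'http://127.0.0.1:5607/v3.0/logs',
    headers={'X-Auth-Token': '<keystone-token>'},  # hypothetical credential
    json=body,                                     # sends application/json
)
print(resp.status_code)  # 204 when the whole bulk package was accepted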


@@ -0,0 +1,128 @@
# Copyright 2015 kornicameister@gmail.com
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import falcon
import six
from monasca_common.rest import utils as rest_utils
from monasca_log_api.app.base.validation import validate_authorization
from monasca_log_api.app.controller.api import versions_api
_VERSIONS_TPL_DICT = {
'v2.0': {
'id': 'v2.0',
'links': [
{
'rel': 'logs',
'href': '/log/single'
}
],
'status': 'DEPRECATED',
'updated': "2015-09-01T00:00:00Z"
},
'v3.0': {
'id': 'v3.0',
'links': [
{
'rel': 'logs',
'href': '/logs'
}
],
'status': 'CURRENT',
'updated': "2016-03-01T00:00:00Z"
}
}
class Versions(versions_api.VersionsAPI):
"""Versions Api"""
@staticmethod
def handle_none_version_id(req, res, result):
for version in _VERSIONS_TPL_DICT:
selected_version = _parse_version(version, req)
result['elements'].append(selected_version)
res.body = rest_utils.as_json(result, sort_keys=True)
res.status = falcon.HTTP_200
@staticmethod
def handle_version_id(req, res, result, version_id):
if version_id in _VERSIONS_TPL_DICT:
result['elements'].append(_parse_version(version_id, req))
res.body = rest_utils.as_json(result, sort_keys=True)
res.status = falcon.HTTP_200
else:
error_body = {'message': '%s is not valid version' % version_id}
res.body = rest_utils.as_json(error_body)
res.status = falcon.HTTP_400
def on_get(self, req, res, version_id=None):
validate_authorization(req, ['log_api:versions:get'])
result = {
'links': _get_common_links(req),
'elements': []
}
if version_id is None:
self.handle_none_version_id(req, res, result)
else:
self.handle_version_id(req, res, result, version_id)
def _get_common_links(req):
self_uri = req.uri
if six.PY2:
self_uri = self_uri.decode(rest_utils.ENCODING)
base_uri = self_uri.replace(req.path, '')
return [
{
'rel': 'self',
'href': self_uri
},
{
'rel': 'version',
'href': '%s/version' % base_uri
},
{
'rel': 'healthcheck',
'href': '%s/healthcheck' % base_uri
}
]
def _parse_version(version_id, req):
self_uri = req.uri
if six.PY2:
self_uri = self_uri.decode(rest_utils.ENCODING)
base_uri = self_uri.replace(req.path, '')
    # need to copy the template dict; consecutive calls
    # need to operate on an unmodified instance
selected_version = _VERSIONS_TPL_DICT[version_id].copy()
raw_links = selected_version['links']
links = []
for link in raw_links:
raw_link_href = link.get('href')
raw_link_rel = link.get('rel')
link_href = base_uri + '/' + version_id + raw_link_href
links.append({
'href': link_href,
'rel': raw_link_rel
})
selected_version['links'] = links
return selected_version
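
For reference, the document assembled by on_get above for GET /version/v3.0 has the following shape (host and port are illustrative; key order is irrelevant):

expected_response = {
    'links': [
        {'rel': 'self', 'href': 'http://127.0.0.1:5607/version/v3.0'},
        {'rel': 'version', 'href': 'http://127.0.0.1:5607/version'},
        {'rel': 'healthcheck', 'href': 'http://127.0.0.1:5607/healthcheck'},
    ],
    'elements': [{
        'id': 'v3.0',
        'links': [{'href': 'http://127.0.0.1:5607/v3.0/logs', 'rel': 'logs'}],
        'status': 'CURRENT',
        'updated': '2016-03-01T00:00:00Z',
    }],
}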


@@ -0,0 +1,46 @@
# Copyright 2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Allows running monasca-log-api from within a local [dev] environment.
Primarily used for development.
"""
import sys
from monasca_log_api import version
from paste import deploy
from paste import httpserver
def get_wsgi_app():
config_dir = 'etc/monasca'
return deploy.loadapp(
'config:%s/log-api-paste.ini' % config_dir,
relative_to='./',
name='main'
)
def main():
wsgi_app = get_wsgi_app()
server_version = 'log-api/%s' % version.version_str
server = httpserver.serve(application=wsgi_app, host='127.0.0.1',
port=5607, server_version=server_version)
return server
if __name__ == '__main__':
sys.exit(main())


@@ -0,0 +1,32 @@
# Copyright 2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Use this file for deploying the API under mod_wsgi.
"""
from paste import deploy
application = None
def main():
base_dir = '/etc/monasca/'
conf = '%slog-api-paste.ini' % base_dir
app = deploy.loadapp('config:%s' % conf)
return app
if __name__ == '__main__' or __name__.startswith('_mod_wsgi'):
application = main()


@@ -0,0 +1,70 @@
# Copyright 2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import pkgutil
from oslo_config import cfg
from oslo_log import log
from oslo_utils import importutils
CONF = cfg.CONF
LOG = log.getLogger(__name__)
def load_conf_modules():
"""Loads all modules that contain configuration
    Method iterates over modules of :py:mod:`monasca_log_api.conf`
    and imports only those that contain the following methods:
    - list_opts (required by oslo_config.genconfig)
    - register_opts (required by this module)
"""
for modname in _list_module_names():
mod = importutils.import_module('monasca_log_api.conf.' + modname)
required_funcs = ['register_opts', 'list_opts']
for func in required_funcs:
if hasattr(mod, func):
yield mod
def _list_module_names():
package_path = os.path.dirname(os.path.abspath(__file__))
for _, modname, ispkg in pkgutil.iter_modules(path=[package_path]):
if not (modname == "opts" and ispkg):
yield modname
def register_opts():
"""Registers all conf modules opts
This method allows different modules to register
opts according to their needs.
"""
for mod in load_conf_modules():
mod.register_opts(CONF)
def list_opts():
"""Lists all conf modules opts.
Goes through all conf modules and yields their opts
"""
for mod in load_conf_modules():
mod_opts = mod.list_opts()
yield mod_opts[0], mod_opts[1]
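# Illustrative usage sketch of the helpers above, assuming the package is
# importable as ``monasca_log_api``; the option names depend on the conf
# modules that are present:
#
#     from monasca_log_api import conf
#
#     conf.register_opts()                  # register every module's opts on CONF
#     for group, opts in conf.list_opts():  # what oslo-config-generator consumes
#         print(group.name, [opt.name for opt in opts])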

View File

@@ -0,0 +1,36 @@
# Copyright 2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
kafka_check_opts = [
cfg.StrOpt('kafka_url',
required=True,
help='URL to the Kafka server'),
cfg.ListOpt('kafka_topics',
required=True,
default=['logs'],
help='Verify existence of configured topics')
]
kafka_check_group = cfg.OptGroup(name='kafka_healthcheck',
title='kafka_healthcheck')
def register_opts(conf):
conf.register_group(kafka_check_group)
conf.register_opts(kafka_check_opts, kafka_check_group)
def list_opts():
return kafka_check_group, kafka_check_opts

View File

@@ -0,0 +1,41 @@
# Copyright 2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
_MAX_MESSAGE_SIZE = 1048576
log_publisher_opts = [
cfg.StrOpt('kafka_url',
required=True,
help='URL to the Kafka server'),
cfg.MultiStrOpt('topics',
default=['logs'],
help='Consumer topics'),
cfg.IntOpt('max_message_size',
default=_MAX_MESSAGE_SIZE,
required=True,
help=('Maximum message size that can be sent '
'to Kafka, defaults to %d bytes' % _MAX_MESSAGE_SIZE))
]
log_publisher_group = cfg.OptGroup(name='log_publisher', title='log_publisher')
def register_opts(conf):
conf.register_group(log_publisher_group)
conf.register_opts(log_publisher_opts, log_publisher_group)
def list_opts():
return log_publisher_group, log_publisher_opts
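# Illustrative [log_publisher] section built from the options above; the
# kafka_url value is an example only, the other values mirror the defaults:
#
#     [log_publisher]
#     kafka_url = 127.0.0.1:9092
#     topics = logs
#     max_message_size = 1048576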

View File

@@ -0,0 +1,49 @@
# Copyright 2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
_DEFAULT_HOST = '127.0.0.1'
_DEFAULT_PORT = 8125
_DEFAULT_BUFFER_SIZE = 50
monitoring_opts = [
cfg.BoolOpt('enable',
default=True,
help='Determine if self monitoring is enabled'),
cfg.HostAddressOpt('statsd_host',
default=_DEFAULT_HOST,
help=('IP address or hostname of the statsd server, defaults to %s'
% _DEFAULT_HOST)),
cfg.PortOpt('statsd_port',
default=_DEFAULT_PORT,
help='Port of the statsd server, defaults to %d' % _DEFAULT_PORT),
cfg.IntOpt('statsd_buffer',
default=_DEFAULT_BUFFER_SIZE,
required=True,
help=('Maximum number of metrics to buffer before sending, '
'defaults to %d' % _DEFAULT_BUFFER_SIZE)),
cfg.DictOpt('dimensions', default={},
required=False, help='Additional dimensions that can be set')
]
monitoring_group = cfg.OptGroup(name='monitoring', title='monitoring')
def register_opts(conf):
conf.register_group(monitoring_group)
conf.register_opts(monitoring_opts, monitoring_group)
def list_opts():
return monitoring_group, monitoring_opts

View File

@@ -0,0 +1,47 @@
# Copyright 2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
role_m_opts = [
cfg.ListOpt(name='path',
default='/',
help='List of paths the middleware applies to'),
cfg.ListOpt(name='default_roles',
default=['monasca-user'],
help='List of roles allowed to access the API'),
cfg.ListOpt(name='agent_roles',
default=None,
help=('List of roles that, if present, mean the request '
'comes from an agent and is therefore authorized '
'automatically')),
cfg.ListOpt(name='delegate_roles',
default=['admin'],
help=('Roles that are allowed to POST logs on '
'behalf of another tenant (project)')),
cfg.ListOpt(name='check_roles',
default=['@'],
help=('Roles that are allowed to check '
'the version and health endpoints'))
]
role_m_group = cfg.OptGroup(name='roles_middleware', title='roles_middleware')
def register_opts(conf):
conf.register_group(role_m_group)
conf.register_opts(role_m_opts, role_m_group)
def list_opts():
return role_m_group, role_m_opts

View File

@@ -0,0 +1,37 @@
# Copyright 2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
_DEFAULT_MAX_LOG_SIZE = 1024 * 1024
service_opts = [
cfg.StrOpt('region',
default=None,
help='Region'),
cfg.IntOpt('max_log_size',
default=_DEFAULT_MAX_LOG_SIZE,
help=('Refers to payload/envelope size. If either is exceeded, '
'the API will throw an error'))
]
service_group = cfg.OptGroup(name='service', title='service')
def register_opts(conf):
conf.register_group(service_group)
conf.register_opts(service_opts, service_group)
def list_opts():
return service_group, service_opts

View File

@@ -0,0 +1,82 @@
# Copyright 2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
from monasca_log_api import conf
from monasca_log_api import version
from oslo_config import cfg
from oslo_log import log
from oslo_policy import opts as policy_opts
CONF = conf.CONF
LOG = log.getLogger(__name__)
_CONF_LOADED = False
_GUNICORN_MARKER = 'gunicorn'
def _is_running_under_gunicorn():
"""Evaluates if api runs under gunicorn"""
content = filter(lambda x: x != sys.executable and _GUNICORN_MARKER in x,
sys.argv or [])
return len(list(content) if not isinstance(content, list) else content) > 0
def get_config_files():
"""Get the possible configuration files accepted by oslo.config
This also includes the deprecated ones
"""
# default files
conf_files = cfg.find_config_files(project='monasca',
prog='monasca-log-api')
# deprecated config files (only used if standard config files are not there)
if len(conf_files) == 0:
old_conf_files = cfg.find_config_files(project='monasca',
prog='log-api')
if len(old_conf_files) > 0:
LOG.warning('Found deprecated old location "{}" '
'of main configuration file'.format(old_conf_files))
conf_files += old_conf_files
return conf_files
def parse_args(argv=None):
global _CONF_LOADED
if _CONF_LOADED:
LOG.debug('Configuration has been already loaded')
return
log.set_defaults()
log.register_options(CONF)
argv = (argv if argv is not None else sys.argv[1:])
args = ([] if _is_running_under_gunicorn() else argv or [])
CONF(args=args,
prog='monasca-log-api',
project='monasca',
version=version.version_str,
default_config_files=get_config_files(),
description='RESTful API to collect log files')
log.setup(CONF,
product_name='monasca-log-api',
version=version.version_str)
conf.register_opts()
policy_opts.set_defaults(CONF)
_CONF_LOADED = True
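# Minimal usage sketch, assuming a WSGI entry point imports this module:
#
#     from monasca_log_api import config
#
#     config.parse_args()   # safe to call more than once thanks to _CONF_LOADED
#     api_conf = config.CONF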

View File

View File

@@ -0,0 +1,43 @@
# Copyright 2017 StackHPC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
class Dimension(collections.namedtuple('Dimension', 'name values')):
"""Representation of dimension names and optional values list.
Named-tuple type to represent the pairing of a dimension name and an
optional list of values.
:ivar name: Name of the dimension to reference.
:ivar values: Optional list of values associated with the dimension.
:vartype name: str
:vartype values: None or list[str]
"""
class SortBy(collections.namedtuple('SortBy', 'field direction')):
"""Representation of an individual sorting directive.
Named-tuple type to represent a directive for indicating how a result set
should be sorted.
:ivar field: Name of the field which provides the values to sort by.
:ivar direction: Either 'asc' or 'desc', specifying the order of values.
:vartype field: str
:vartype direction: str
"""

View File

View File

@@ -0,0 +1,74 @@
# Copyright 2017 StackHPC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class LogsRepository(object):
def __init__(self):
super(LogsRepository, self).__init__()
@abc.abstractmethod
def list_logs(self, tenant_id, dimensions, start_time, end_time, offset,
limit, sort_by):
"""Obtain log listing based on simple criteria of dimension values.
Performs queries on the underlying log storage against a time range and
set of dimension values. Additionally, it is possible to optionally
sort results by timestamp.
:param tenant_id:
Tenant/project id for which to obtain logs (required).
:param dimensions:
List of Dimension tuples containing pairs of dimension names and
optional lists of dimension values. These will be used to filter
the logs returned. If no dimensions are specified, then no
filtering is performed. When multiple values are given, the
dimension must match any of the given values. If None is given,
logs with any value for the dimension will be returned.
:param start_time:
Optional starting time in UNIX time (seconds, inclusive).
:param end_time:
Optional ending time in UNIX time (seconds, inclusive).
:param offset:
Number of matching results to skip past, if specified.
:param limit:
Number of matching results to return (required).
:param sort_by:
List of SortBy tuples specifying fields to sort by and the
direction to sort the result set by. e.g. ('timestamp','asc'). The
direction is specified by either the string 'asc' for ascending
direction, or 'desc' for descending. If not specified, no particular
order is enforced and the implementation is free to choose the most
efficient method to return the results.
:type tenant_id: str
:type dimensions: None or list[Dimension[str, list[str] or None]]
:type start_time: None or int
:type end_time: None or int
:type offset: None or int
:type limit: int
:type sort_by: None or list[SortBy[str, str]]
:return:
Log messages matching the given criteria. The dict representing
each message entry will contain attributes extracted from the
underlying structure; 'message', 'timestamp' and 'dimensions'.
:rtype: list[dict]
"""
pass
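# A minimal sketch of a driver honouring the contract above; the class name
# and the in-memory storage are hypothetical:
#
#     class InMemoryLogsRepository(LogsRepository):
#         def __init__(self, entries):
#             super(InMemoryLogsRepository, self).__init__()
#             self._entries = entries  # dicts with 'message', 'timestamp'
#                                      # and 'dimensions'
#
#         def list_logs(self, tenant_id, dimensions, start_time, end_time,
#                       offset, limit, sort_by):
#             start = offset or 0
#             return self._entries[start:start + limit]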

View File

@@ -0,0 +1 @@
"""Base package for monasca-log-api healthcheck"""

View File

@@ -0,0 +1,98 @@
# Copyright 2015-2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
from monasca_common.kafka_lib import client
from monasca_log_api import conf
from oslo_log import log
from six import PY3
LOG = log.getLogger(__name__)
CONF = conf.CONF
CheckResult = collections.namedtuple('CheckResult', ['healthy', 'message'])
"""Result from the healthcheck, contains healthy(boolean) and message"""
# TODO(feature) monasca-common candidate
class KafkaHealthCheck(object):
"""Evaluates kafka health
Healthcheck verifies if:
* kafka server is up and running
* there is a configured topic in kafka
If following conditions are met healthcheck returns healthy status.
Otherwise unhealthy status is returned with explanation.
Example of middleware configuration:
.. code-block:: ini
[kafka_healthcheck]
kafka_url = localhost:8900
kafka_topics = log
Note:
It is possible to specify multiple topics if necessary.
Just separate them with a comma (,).
"""
def healthcheck(self):
url = CONF.kafka_healthcheck.kafka_url
try:
kafka_client = client.KafkaClient(hosts=url)
except client.KafkaUnavailableError as ex:
LOG.error(repr(ex))
error_str = 'Could not connect to kafka at %s' % url
return CheckResult(healthy=False, message=error_str)
result = self._verify_topics(kafka_client)
self._disconnect_gracefully(kafka_client)
return result
# noinspection PyMethodMayBeStatic
def _verify_topics(self, kafka_client):
topics = CONF.kafka_healthcheck.kafka_topics
if PY3:
topics = tuple(topic.encode('utf-8') for topic in topics)
for t in topics:
# kafka client loads metadata for topics as fast
# as possible (happens in __init__), therefore this
# topic_partitions is sure to be filled
for_topic = t in kafka_client.topic_partitions
if not for_topic:
error_str = 'Kafka: Topic %s not found' % t
LOG.error(error_str)
return CheckResult(healthy=False, message=error_str)
return CheckResult(healthy=True, message='OK')
# noinspection PyMethodMayBeStatic
def _disconnect_gracefully(self, kafka_client):
# at this point, client is connected so it must be closed
# regardless of topic existence
try:
kafka_client.close()
except Exception as ex:
# log that something went wrong and move on
LOG.error(repr(ex))

View File

@@ -0,0 +1,152 @@
# Copyright 2015-2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from monasca_log_api import conf
from oslo_log import log
from oslo_middleware import base as om
from webob import response
CONF = conf.CONF
LOG = log.getLogger(__name__)
_X_IDENTITY_STATUS = 'X-Identity-Status'
_X_ROLES = 'X-Roles'
_X_MONASCA_LOG_AGENT = 'X-MONASCA-LOG-AGENT'
_CONFIRMED_STATUS = 'Confirmed'
def _ensure_lower_roles(roles):
if not roles:
return []
return [role.strip().lower() for role in roles]
def _intersect(a, b):
return list(set(a) & set(b))
class RoleMiddleware(om.ConfigurableMiddleware):
"""Authorization middleware for X-Roles header.
RoleMiddleware is responsible for authorizing a user's
access based on the **X-Roles** header. The middleware
expects authentication to be completed (i.e. the keystone middleware
has already been called).
If the tenant is authenticated and authorized, the middleware
exits silently (that is considered a success). Otherwise the
middleware produces a JSON response according to the following schema
.. code-block:: javascript
{
'title': u'Unauthorized',
'message': explanation (str)
}
Configuration example
.. code-block:: cfg
[roles_middleware]
path = /v2.0/log
default_roles = monasca-user
agent_roles = monasca-log-agent
delegate_roles = admin
Configuration explained:
* path (list) - path (or list of paths) the middleware should be applied to
* agent_roles (list) - list of roles that identifies tenant as an agent
* default_roles (list) - list of roles that should be authorized
* delegate_roles (list) - list of roles that are allowed to POST logs on
behalf of another tenant (project)
Note:
Being an agent means that the tenant is automatically authorized.
Note:
The middleware applies only to the configured paths and to all
requests except those using the HTTP method **OPTIONS**.
"""
def __init__(self, application, conf=None):
super(RoleMiddleware, self).__init__(application, conf)
middleware = CONF.roles_middleware
self._path = middleware.path
self._default_roles = _ensure_lower_roles(middleware.default_roles)
self._agent_roles = _ensure_lower_roles(middleware.agent_roles)
LOG.debug('RolesMiddleware initialized for paths=%s', self._path)
def process_request(self, req):
if not self._can_apply_middleware(req):
LOG.debug('%s skipped in role middleware', req.path)
return None
is_authenticated = self._is_authenticated(req)
is_agent = self._is_agent(req)
tenant_id = req.headers.get('X-Tenant-Id')
req.environ[_X_MONASCA_LOG_AGENT] = is_agent
LOG.debug('%s is authenticated=%s, log_agent=%s',
tenant_id, is_authenticated, is_agent)
if is_authenticated:
LOG.debug('%s has been authenticated', tenant_id)
return  # returning nothing lets the request proceed into the API
explanation = u'Failed to authenticate request for %s' % tenant_id
LOG.error(explanation)
json_body = {u'title': u'Unauthorized', u'message': explanation}
return response.Response(status=401,
json_body=json_body,
content_type='application/json')
def _is_agent(self, req):
headers = req.headers
roles = headers.get(_X_ROLES)
if not roles:
LOG.warning('Could not locate %s header, or it was empty', _X_ROLES)
return False
else:
roles = _ensure_lower_roles(roles.split(','))
is_agent = len(_intersect(roles, self._agent_roles)) > 0
return is_agent
def _is_authenticated(self, req):
headers = req.headers
if _X_IDENTITY_STATUS in headers:
status = req.headers.get(_X_IDENTITY_STATUS)
return _CONFIRMED_STATUS == status
return False
def _can_apply_middleware(self, req):
path = req.path
method = req.method
if method == 'OPTIONS':
return False
if self._path:
for p in self._path:
if path.startswith(p):
return True
return False # if no configured paths, or nothing matches
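# One possible paste-deploy wiring for this middleware; the filter name,
# the module path and the pipeline members are assumptions and must match
# the deployment's log-api-paste.ini:
#
#     [filter:roles]
#     paste.filter_factory = monasca_log_api.middleware.role_middleware:RoleMiddleware.factory
#
#     [pipeline:main]
#     pipeline = roles main_api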

View File

@@ -0,0 +1,78 @@
# Copyright 2016-2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import monascastatsd
from monasca_log_api import conf
from oslo_log import log
LOG = log.getLogger(__name__)
CONF = conf.CONF
_CLIENT_NAME = 'monasca'
_DEFAULT_DIMENSIONS = {
'service': 'monitoring',
'component': 'monasca-log-api'
}
def get_client(dimensions=None):
"""Creates statsd client
Creates a monasca-statsd client using configuration from
the config file and the supplied dimensions.
Configuration is composed of ::
[monitoring]
statsd_host = 192.168.10.4
statsd_port = 8125
statsd_buffer = 50
Dimensions are appended to the following dictionary ::
{
'service': 'monitoring',
'component': 'monasca-log-api'
}
Note:
Passed dimensions do not override those specified in
the dictionary above.
:param dict dimensions: Optional dimensions
:return: statsd client
:rtype: monascastatsd.Client
"""
dims = _DEFAULT_DIMENSIONS.copy()
if dimensions:
for key, val in dimensions.items():
if key not in _DEFAULT_DIMENSIONS:
dims[key] = val
else:
LOG.warning('Cannot override fixed dimension %s=%s', key,
_DEFAULT_DIMENSIONS[key])
connection = monascastatsd.Connection(
host=CONF.monitoring.statsd_host,
port=CONF.monitoring.statsd_port,
max_buffer_size=CONF.monitoring.statsd_buffer
)
client = monascastatsd.Client(name=_CLIENT_NAME,
connection=connection,
dimensions=dims)
LOG.debug('Created statsd client %s[%s] = %s:%d', _CLIENT_NAME, dims,
CONF.monitoring.statsd_host, CONF.monitoring.statsd_port)
return client
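# Illustrative usage of the client above; the extra dimension and the metric
# name ('log.in_logs', defined in monasca_log_api.monitoring.metrics) are
# examples only:
#
#     statsd_client = get_client({'hostname': 'api-node-01'})
#     counter = statsd_client.get_counter(name='log.in_logs')
#     counter.increment(1)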

View File

@@ -0,0 +1,47 @@
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
LOGS_RECEIVED_METRIC = 'log.in_logs'
"""Metrics sent with amount of logs (not requests) API receives"""
LOGS_REJECTED_METRIC = 'log.in_logs_rejected'
"""Metric sent with amount of logs that were rejected
(i.e. invalid dimension)"""
LOGS_BULKS_REJECTED_METRIC = 'log.in_bulks_rejected'
"""Metric sent with amount of bulk packages that were rejected due
to early stage validation (content-length, content-type).
Only valid for v3.0.
"""
LOGS_RECEIVED_BYTE_SIZE_METRICS = 'log.in_logs_bytes'
"""Metric sent with size of payloads(a.k.a. Content-Length)
(in bytes) API receives"""
LOGS_PROCESSING_TIME_METRIC = 'log.processing_time_ms'
"""Metric sent with time that log-api needed to process each received log.
Metric does not include time needed to authorize requests."""
LOGS_PUBLISHED_METRIC = 'log.out_logs'
"""Metric sent with amount of logs published to kafka"""
LOGS_PUBLISHED_LOST_METRIC = 'log.out_logs_lost'
"""Metric sent with amount of logs that were lost due to critical error in
publish phase."""
LOGS_PUBLISH_TIME_METRIC = 'log.publish_time_ms'
"""Metric sent with time that publishing took"""
LOGS_TRUNCATED_METRIC = 'log.out_logs_truncated_bytes'
"""Metric sent with amount of truncated bytes from log message"""

View File

@@ -0,0 +1,73 @@
# Copyright 2017 FUJITSU LIMITED
# Copyright 2018 OP5 AB
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import pkgutil
from monasca_log_api.conf import role_middleware
from oslo_config import cfg
from oslo_log import log
from oslo_utils import importutils
LOG = log.getLogger(__name__)
_BASE_MOD_PATH = 'monasca_log_api.policies.'
CONF = cfg.CONF
def roles_list_to_check_str(roles_list):
if roles_list:
converted_roles_list = ["role:" + role if role != '@' else role for role in roles_list]
return ' or '.join(converted_roles_list)
else:
return None
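# Illustrative results of the conversion above:
#
#     roles_list_to_check_str(['admin', 'monasca-user'])  # "role:admin or role:monasca-user"
#     roles_list_to_check_str(['@'])                       # "@" (oslo.policy wildcard, always allowed)
#     roles_list_to_check_str(None)                        # None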
role_middleware.register_opts(CONF)
def load_policy_modules():
"""Load all modules that contain policies.
Method iterates over modules of :py:mod:`monasca_log_api.policies`
and imports only those that contain following methods:
- list_rules
"""
for modname in _list_module_names():
mod = importutils.import_module(_BASE_MOD_PATH + modname)
if hasattr(mod, 'list_rules'):
yield mod
def _list_module_names():
package_path = os.path.dirname(os.path.abspath(__file__))
for _, modname, ispkg in pkgutil.iter_modules(path=[package_path]):
if not (modname == "opts" and ispkg):
yield modname
def list_rules():
"""List all policy modules rules.
Goes through all policy modules and returns their rules.
"""
all_rules = []
for mod in load_policy_modules():
rules = mod.list_rules()
all_rules.extend(rules)
return all_rules

View File

@@ -0,0 +1,44 @@
# Copyright 2018 OP5 AB
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from monasca_log_api import policies
from oslo_config import cfg
from oslo_policy import policy
CHECK_AUTHORIZED_ROLES = policies.roles_list_to_check_str(
cfg.CONF.roles_middleware.check_roles)
rules = [
policy.DocumentedRuleDefault(
name='log_api:healthcheck:head',
check_str=CHECK_AUTHORIZED_ROLES,
description='Healthcheck head rule',
operations=[
{'path': '/healthcheck', 'method': 'HEAD'}
]
),
policy.DocumentedRuleDefault(
name='log_api:healthcheck:get',
check_str=CHECK_AUTHORIZED_ROLES,
description='Healthcheck get rule',
operations=[
{'path': '/healthcheck', 'method': 'GET'}
]
),
]
def list_rules():
return rules

View File

@@ -0,0 +1,43 @@
# Copyright 2018 OP5 AB
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from monasca_log_api import policies
from oslo_config import cfg
from oslo_policy import policy
DEFAULT_AUTHORIZED_ROLES = policies.roles_list_to_check_str(
cfg.CONF.roles_middleware.default_roles)
AGENT_AUTHORIZED_ROLES = policies.roles_list_to_check_str(
cfg.CONF.roles_middleware.agent_roles)
DELEGATE_AUTHORIZED_ROLES = policies.roles_list_to_check_str(
cfg.CONF.roles_middleware.delegate_roles)
rules = [
policy.DocumentedRuleDefault(
name='log_api:logs:post',
check_str=' or '.join(filter(None, [AGENT_AUTHORIZED_ROLES,
DEFAULT_AUTHORIZED_ROLES,
DELEGATE_AUTHORIZED_ROLES])),
description='Logs post rule',
operations=[
{'path': '/logs', 'method': 'POST'},
{'path': '/log/single', 'method': 'POST'}
]
)
]
def list_rules():
return rules
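# With the defaults registered in monasca_log_api/conf (agent_roles unset,
# default_roles=['monasca-user'], delegate_roles=['admin']) the generated
# check string above evaluates to roughly:
#
#     "role:monasca-user or role:admin"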

View File

@@ -0,0 +1,38 @@
# Copyright 2018 OP5 AB
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from monasca_log_api import policies
from oslo_config import cfg
from oslo_policy import policy
CHECK_AUTHORIZED_ROLES = policies.roles_list_to_check_str(
cfg.CONF.roles_middleware.check_roles)
rules = [
policy.DocumentedRuleDefault(
name='log_api:versions:get',
check_str=CHECK_AUTHORIZED_ROLES,
description='Versions get rule',
operations=[
{'path': '/', 'method': 'GET'},
{'path': '/version', 'method': 'GET'},
{'path': '/version/{version_id}', 'method': 'GET'}
]
)
]
def list_rules():
return rules

View File

View File

@@ -0,0 +1,220 @@
# coding=utf-8
# Copyright 2015 kornicameister@gmail.com
# Copyright 2015-2017 FUJITSU LIMITED
# Copyright 2018 OP5 AB
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import codecs
import os
import random
import string
import falcon
import fixtures
import mock
import six
from falcon import testing
from monasca_common.policy import policy_engine as policy
from monasca_log_api import conf
from monasca_log_api import config
from monasca_log_api import policies
from monasca_log_api.app.base import request
from oslo_config import fixture as oo_cfg
from oslo_context import fixture as oo_ctx
from oslo_serialization import jsonutils
from oslotest import base as oslotest_base
policy.POLICIES = policies
class MockedAPI(falcon.API):
"""MockedAPI
Subclasses :py:class:`falcon.API` in order to overwrite
request_type property with custom :py:class:`request.Request`
"""
def __init__(self):
super(MockedAPI, self).__init__(
media_type=falcon.DEFAULT_MEDIA_TYPE,
request_type=request.Request,
response_type=falcon.Response,
middleware=None,
router=None
)
def generate_unique_message(size):
letters = string.ascii_letters
def rand(amount, space=True):
space = ' ' if space else ''
return ''.join((random.choice(letters + space) for _ in range(amount)))
return rand(size)
def _hex_to_unicode(hex_raw):
hex_raw = six.b(hex_raw.replace(' ', ''))
hex_str_raw = codecs.getdecoder('hex')(hex_raw)[0]
hex_str = hex_str_raw.decode('utf-8', 'replace')
return hex_str
# NOTE(trebskit) => http://www.cl.cam.ac.uk/~mgk25/ucs/examples/UTF-8-test.txt
UNICODE_MESSAGES = [
# Unicode is evil...
{'case': 'arabic', 'input': 'يونيكود هو الشر'},
{'case': 'polish', 'input': 'Unicode to zło'},
{'case': 'greek', 'input': 'Unicode είναι κακό'},
{'case': 'portuguese', 'input': 'Unicode é malvado'},
{'case': 'lao', 'input': 'unicode ເປັນຄວາມຊົ່ວຮ້າຍ'},
{'case': 'german', 'input': 'Unicode ist böse'},
{'case': 'japanese', 'input': 'ユニコードは悪です'},
{'case': 'russian', 'input': 'Unicode - зло'},
{'case': 'urdu', 'input': 'یونیسیڈ برائی ہے'},
{'case': 'weird', 'input': '🆄🅽🅸🅲🅾🅳🅴 🅸🆂 🅴🆅🅸🅻...'}, # funky, huh ?
# conditions from link above
# 2.3 Other boundary conditions
{'case': 'stress_2_3_1', 'input': _hex_to_unicode('ed 9f bf')},
{'case': 'stress_2_3_2', 'input': _hex_to_unicode('ee 80 80')},
{'case': 'stress_2_3_3', 'input': _hex_to_unicode('ef bf bd')},
{'case': 'stress_2_3_4', 'input': _hex_to_unicode('f4 8f bf bf')},
{'case': 'stress_2_3_5', 'input': _hex_to_unicode('f4 90 80 80')},
# 3.5 Impossible bytes
{'case': 'stress_3_5_1', 'input': _hex_to_unicode('fe')},
{'case': 'stress_3_5_2', 'input': _hex_to_unicode('ff')},
{'case': 'stress_3_5_3', 'input': _hex_to_unicode('fe fe ff ff')},
# 4.1 Examples of an overlong ASCII character
{'case': 'stress_4_1_1', 'input': _hex_to_unicode('c0 af')},
{'case': 'stress_4_1_2', 'input': _hex_to_unicode('e0 80 af')},
{'case': 'stress_4_1_3', 'input': _hex_to_unicode('f0 80 80 af')},
{'case': 'stress_4_1_4', 'input': _hex_to_unicode('f8 80 80 80 af')},
{'case': 'stress_4_1_5', 'input': _hex_to_unicode('fc 80 80 80 80 af')},
# 4.2 Maximum overlong sequences
{'case': 'stress_4_2_1', 'input': _hex_to_unicode('c1 bf')},
{'case': 'stress_4_2_2', 'input': _hex_to_unicode('e0 9f bf')},
{'case': 'stress_4_2_3', 'input': _hex_to_unicode('f0 8f bf bf')},
{'case': 'stress_4_2_4', 'input': _hex_to_unicode('f8 87 bf bf bf')},
{'case': 'stress_4_2_5', 'input': _hex_to_unicode('fc 83 bf bf bf bf')},
# 4.3 Overlong representation of the NUL character
{'case': 'stress_4_3_1', 'input': _hex_to_unicode('c0 80')},
{'case': 'stress_4_3_2', 'input': _hex_to_unicode('e0 80 80')},
{'case': 'stress_4_3_3', 'input': _hex_to_unicode('f0 80 80 80')},
{'case': 'stress_4_3_4', 'input': _hex_to_unicode('f8 80 80 80 80')},
{'case': 'stress_4_3_5', 'input': _hex_to_unicode('fc 80 80 80 80 80')},
# and some cheesy example from polish novel 'Pan Tadeusz'
{'case': 'mr_t', 'input': 'Hajże na Soplicę!'},
# it won't be complete without that one
{'case': 'mr_b', 'input': 'Grzegorz Brzęczyszczykiewicz, '
'Chrząszczyżewoszyce, powiat Łękołody'},
# great success, christmas time
{'case': 'olaf', 'input': ''}
]
class DisableStatsdFixture(fixtures.Fixture):
def setUp(self):
super(DisableStatsdFixture, self).setUp()
statsd_patch = mock.patch('monascastatsd.Connection')
statsd_patch.start()
self.addCleanup(statsd_patch.stop)
class ConfigFixture(oo_cfg.Config):
"""Mocks configuration"""
def __init__(self):
super(ConfigFixture, self).__init__(config.CONF)
def setUp(self):
super(ConfigFixture, self).setUp()
self.addCleanup(self._clean_config_loaded_flag)
conf.register_opts()
self._set_defaults()
config.parse_args(argv=[]) # prevent oslo from parsing test args
@staticmethod
def _clean_config_loaded_flag():
config._CONF_LOADED = False
def _set_defaults(self):
self.conf.set_default('kafka_url', '127.0.0.1', 'kafka_healthcheck')
self.conf.set_default('kafka_url', '127.0.0.1', 'log_publisher')
class PolicyFixture(fixtures.Fixture):
"""Override the policy with a completely new policy file.
This overrides the policy with a completely fake and synthetic
policy file.
"""
def setUp(self):
super(PolicyFixture, self).setUp()
self._prepare_policy()
policy.reset()
policy.init()
def _prepare_policy(self):
policy_dir = self.useFixture(fixtures.TempDir())
policy_file = os.path.join(policy_dir.path, 'policy.yaml')
# load the fake_policy data and add the missing default rules.
policy_rules = jsonutils.loads('{}')
self.add_missing_default_rules(policy_rules)
with open(policy_file, 'w') as f:
jsonutils.dump(policy_rules, f)
BaseTestCase.conf_override(policy_file=policy_file, group='oslo_policy')
BaseTestCase.conf_override(policy_dirs=[], group='oslo_policy')
@staticmethod
def add_missing_default_rules(rules):
for rule in policies.list_rules():
if rule.name not in rules:
rules[rule.name] = rule.check_str
class BaseTestCase(oslotest_base.BaseTestCase):
def setUp(self):
super(BaseTestCase, self).setUp()
self.useFixture(ConfigFixture())
self.useFixture(DisableStatsdFixture())
self.useFixture(oo_ctx.ClearRequestContext())
self.useFixture(PolicyFixture())
@staticmethod
def conf_override(**kw):
"""Override flag variables for a test."""
group = kw.pop('group', None)
for k, v in kw.items():
config.CONF.set_override(k, v, group)
@staticmethod
def conf_default(**kw):
"""Override flag variables for a test."""
group = kw.pop('group', None)
for k, v in kw.items():
config.CONF.set_default(k, v, group)
class BaseApiTestCase(BaseTestCase, testing.TestBase):
api_class = MockedAPI

View File

@@ -0,0 +1,40 @@
# Copyright 2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from monasca_log_api import config
from monasca_log_api.tests import base
class TestConfig(base.BaseTestCase):
@mock.patch('monasca_log_api.config.sys')
def test_should_return_true_if_runs_under_gunicorn(self, sys_patch):
sys_patch.argv = [
'/bin/gunicorn',
'--capture-output',
'--paste',
'etc/monasca/log-api-paste.ini',
'--workers',
'1'
]
sys_patch.executable = '/bin/python'
self.assertTrue(config._is_running_under_gunicorn())
@mock.patch('monasca_log_api.config.sys')
def test_should_return_false_if_runs_without_gunicorn(self, sys_patch):
sys_patch.argv = ['/bin/monasca-log-api']
sys_patch.executable = '/bin/python'
self.assertFalse(config._is_running_under_gunicorn())

View File

@@ -0,0 +1,75 @@
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import falcon
import mock
import simplejson as json
from monasca_log_api.app.controller import healthchecks
from monasca_log_api.healthcheck import kafka_check as healthcheck
from monasca_log_api.tests import base
ENDPOINT = '/healthcheck'
class TestApiHealthChecks(base.BaseApiTestCase):
def before(self):
self.resource = healthchecks.HealthChecks()
self.api.add_route(
ENDPOINT,
self.resource
)
def test_should_return_200_for_head(self):
self.simulate_request(ENDPOINT, method='HEAD')
self.assertEqual(falcon.HTTP_NO_CONTENT, self.srmock.status)
@mock.patch('monasca_log_api.healthcheck.kafka_check.KafkaHealthCheck')
def test_should_report_healthy_if_kafka_healthy(self, kafka_check):
kafka_check.healthcheck.return_value = healthcheck.CheckResult(True,
'OK')
self.resource._kafka_check = kafka_check
ret = self.simulate_request(ENDPOINT,
headers={
'Content-Type': 'application/json'
},
decode='utf8',
method='GET')
self.assertEqual(falcon.HTTP_OK, self.srmock.status)
ret = json.loads(ret)
self.assertIn('kafka', ret)
self.assertEqual('OK', ret.get('kafka'))
@mock.patch('monasca_log_api.healthcheck.kafka_check.KafkaHealthCheck')
def test_should_report_unhealthy_if_kafka_unhealthy(self, kafka_check):
url = 'localhost:8200'
err_str = 'Could not connect to kafka at %s' % url
kafka_check.healthcheck.return_value = healthcheck.CheckResult(False,
err_str)
self.resource._kafka_check = kafka_check
ret = self.simulate_request(ENDPOINT,
headers={
'Content-Type': 'application/json'
},
decode='utf8',
method='GET')
self.assertEqual(falcon.HTTP_SERVICE_UNAVAILABLE, self.srmock.status)
ret = json.loads(ret)
self.assertIn('kafka', ret)
self.assertEqual(err_str, ret.get('kafka'))

View File

@@ -0,0 +1,71 @@
# Copyright 2015-2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from monasca_common.kafka_lib import client
from monasca_log_api.healthcheck import kafka_check as kc
from monasca_log_api.tests import base
class KafkaCheckLogicTest(base.BaseTestCase):
mock_kafka_url = 'localhost:1234'
mocked_topics = ['test_1', 'test_2']
mock_config = {
'kafka_url': mock_kafka_url,
'kafka_topics': mocked_topics
}
def setUp(self):
super(KafkaCheckLogicTest, self).setUp()
self.conf_default(group='kafka_healthcheck', **self.mock_config)
@mock.patch('monasca_log_api.healthcheck.kafka_check.client.KafkaClient')
def test_should_fail_kafka_unavailable(self, kafka_client):
kafka_client.side_effect = client.KafkaUnavailableError()
kafka_health = kc.KafkaHealthCheck()
result = kafka_health.healthcheck()
self.assertFalse(result.healthy)
@mock.patch('monasca_log_api.healthcheck.kafka_check.client.KafkaClient')
def test_should_fail_topic_missing(self, kafka_client):
kafka = mock.Mock()
kafka.topic_partitions = [self.mocked_topics[0]]
kafka_client.return_value = kafka
kafka_health = kc.KafkaHealthCheck()
result = kafka_health.healthcheck()
# verify result
self.assertFalse(result.healthy)
# ensure client was closed
self.assertTrue(kafka.close.called)
@mock.patch('monasca_log_api.healthcheck.kafka_check.client.KafkaClient')
def test_should_pass(self, kafka_client):
kafka = mock.Mock()
kafka.topic_partitions = self.mocked_topics
kafka_client.return_value = kafka
kafka_health = kc.KafkaHealthCheck()
result = kafka_health.healthcheck()
self.assertTrue(result.healthy)
# ensure client was closed
self.assertTrue(kafka.close.called)

View File

@@ -0,0 +1,294 @@
# Copyright 2015 kornicameister@gmail.com
# Copyright 2016-2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import datetime
import random
import ujson
import unittest
import mock
import six
from monasca_log_api.app.base import log_publisher
from monasca_log_api.app.base import model
from monasca_log_api.tests import base
from oslo_config import cfg
from oslo_log import log
LOG = log.getLogger(__name__)
EPOCH_START = datetime.datetime(1970, 1, 1)
class TestSendMessage(base.BaseTestCase):
@mock.patch('monasca_log_api.app.base.log_publisher.producer'
'.KafkaProducer')
def test_should_not_send_empty_message(self, _):
instance = log_publisher.LogPublisher()
instance._kafka_publisher = mock.Mock()
instance.send_message({})
self.assertFalse(instance._kafka_publisher.publish.called)
@unittest.expectedFailure
def test_should_not_send_message_not_dict(self):
instance = log_publisher.LogPublisher()
not_dict_value = 123
instance.send_message(not_dict_value)
@mock.patch('monasca_log_api.app.base.log_publisher.producer'
'.KafkaProducer')
def test_should_not_send_message_missing_keys(self, _):
# checks every combination of missing keys
# the test does not care what values those keys hold,
# it simply assumes that values are set but important
# message (i.e. envelope) properties are missing entirely
# that's why there are two loops instead of three
instance = log_publisher.LogPublisher()
keys = ['log', 'creation_time', 'meta']
for key_1 in keys:
diff = keys[:]
diff.remove(key_1)
for key_2 in diff:
message = {
key_1: random.randint(10, 20),
key_2: random.randint(30, 50)
}
self.assertRaises(log_publisher.InvalidMessageException,
instance.send_message,
message)
@mock.patch('monasca_log_api.app.base.log_publisher.producer'
'.KafkaProducer')
def test_should_not_send_message_missing_values(self, _):
# the original message assumes that every property has a value;
# the test modifies each property one by one by removing that value
# (i.e. creating a false-like value)
instance = log_publisher.LogPublisher()
message = {
'log': {
'message': '11'
},
'creation_time': 123456,
'meta': {
'region': 'pl'
}
}
for key in message:
tmp_message = message
tmp_message[key] = None
self.assertRaises(log_publisher.InvalidMessageException,
instance.send_message,
tmp_message)
@mock.patch('monasca_log_api.app.base.log_publisher.producer'
'.KafkaProducer')
def test_should_send_message(self, kafka_producer):
instance = log_publisher.LogPublisher()
instance._kafka_publisher = kafka_producer
instance.send_message({})
creation_time = ((datetime.datetime.utcnow() - EPOCH_START)
.total_seconds())
application_type = 'monasca-log-api'
dimension_1_name = 'disk_usage'
dimension_1_value = '50'
dimension_2_name = 'cpu_time'
dimension_2_value = '60'
msg = model.Envelope(
log={
'message': '1',
'application_type': application_type,
'dimensions': {
dimension_1_name: dimension_1_value,
dimension_2_name: dimension_2_value
}
},
meta={
'tenantId': '1'
}
)
msg['creation_time'] = creation_time
instance.send_message(msg)
instance._kafka_publisher.publish.assert_called_once_with(
cfg.CONF.log_publisher.topics[0],
[ujson.dumps(msg, ensure_ascii=False).encode('utf-8')])
@mock.patch('monasca_log_api.app.base.log_publisher.producer'
'.KafkaProducer')
def test_should_send_message_multiple_topics(self, _):
topics = ['logs', 'analyzer', 'tester']
self.conf_override(topics=topics,
max_message_size=5000,
group='log_publisher')
instance = log_publisher.LogPublisher()
instance._kafka_publisher = mock.Mock()
instance.send_message({})
creation_time = ((datetime.datetime.utcnow() - EPOCH_START)
.total_seconds())
dimension_1_name = 'disk_usage'
dimension_1_value = '50'
dimension_2_name = 'cpu_time'
dimension_2_value = '60'
application_type = 'monasca-log-api'
msg = model.Envelope(
log={
'message': '1',
'application_type': application_type,
'dimensions': {
dimension_1_name: dimension_1_value,
dimension_2_name: dimension_2_value
}
},
meta={
'tenantId': '1'
}
)
msg['creation_time'] = creation_time
json_msg = ujson.dumps(msg, ensure_ascii=False)
instance.send_message(msg)
self.assertEqual(len(topics),
instance._kafka_publisher.publish.call_count)
for topic in topics:
instance._kafka_publisher.publish.assert_any_call(
topic,
[json_msg.encode('utf-8')])
@mock.patch('monasca_log_api.app.base.log_publisher.producer'
'.KafkaProducer')
def test_should_send_unicode_message(self, kp):
instance = log_publisher.LogPublisher()
instance._kafka_publisher = kp
for um in base.UNICODE_MESSAGES:
case, msg = um.values()
try:
envelope = model.Envelope(
log={
'message': msg,
'application_type': 'test',
'dimensions': {
'test': 'test_log_publisher',
'case': 'test_should_send_unicode_message'
}
},
meta={
'tenantId': 1
}
)
instance.send_message(envelope)
expected_message = ujson.dumps(envelope, ensure_ascii=False)
if six.PY3:
expected_message = expected_message.encode('utf-8')
instance._kafka_publisher.publish.assert_called_with(
cfg.CONF.log_publisher.topics[0],
[expected_message]
)
except Exception:
LOG.exception('Failed to evaluate unicode case %s', case)
raise
@mock.patch(
'monasca_log_api.app.base.log_publisher.producer'
'.KafkaProducer')
class TestTruncation(base.BaseTestCase):
EXTRA_CHARS_SIZE = len(bytearray(ujson.dumps({
'log': {
'message': None
}
}), 'utf8')) - 2
def test_should_not_truncate_message_if_size_is_smaller(self, _):
diff_size = random.randint(1, 100)
self._run_truncate_test(log_size_factor=-diff_size,
truncate_by=0)
def test_should_not_truncate_message_if_size_equal_to_max(self, _):
self._run_truncate_test(log_size_factor=0,
truncate_by=0)
def test_should_truncate_too_big_message(self, _):
diff_size = random.randint(1, 100)
max_size = 1000
truncate_by = ((max_size -
(max_size - log_publisher._TRUNCATED_PROPERTY_SIZE)) +
log_publisher._TRUNCATION_SAFE_OFFSET + diff_size)
self._run_truncate_test(max_message_size=1000,
log_size_factor=diff_size,
truncate_by=truncate_by)
def _run_truncate_test(self,
max_message_size=1000,
log_size_factor=0,
truncate_by=0,
gen_fn=base.generate_unique_message):
log_size = (max_message_size -
TestTruncation.EXTRA_CHARS_SIZE -
log_publisher._KAFKA_META_DATA_SIZE -
log_publisher._TIMESTAMP_KEY_SIZE +
log_size_factor)
expected_log_message_size = log_size - truncate_by
self.conf_override(
group='log_publisher',
max_message_size=max_message_size
)
log_msg = gen_fn(log_size)
envelope = {
'log': {
'message': log_msg
}
}
instance = log_publisher.LogPublisher()
instance._logs_truncated_gauge.send = meter = mock.Mock()
envelope_copy = copy.deepcopy(envelope)
json_envelope = instance._truncate(envelope_copy)
parsed_envelope = ujson.loads(json_envelope)
parsed_log_message = parsed_envelope['log']['message']
parsed_log_message_len = len(parsed_log_message)
if truncate_by > 0:
self.assertNotEqual(envelope['log']['message'],
parsed_log_message)
else:
self.assertEqual(envelope['log']['message'],
parsed_log_message)
self.assertEqual(expected_log_message_size, parsed_log_message_len)
self.assertEqual(1, meter.call_count)
self.assertEqual(truncate_by, meter.mock_calls[0][2]['value'])

View File

@@ -0,0 +1,269 @@
# Copyright 2015 kornicameister@gmail.com
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import falcon
import mock
from monasca_log_api.app.base import exceptions as log_api_exceptions
from monasca_log_api.app.controller.api import headers
from monasca_log_api.app.controller.v2 import logs
from monasca_log_api.tests import base
ROLES = 'admin'
def _init_resource(test):
resource = logs.Logs()
test.api.add_route('/log/single', resource)
return resource
class TestApiLogsVersion(base.BaseApiTestCase):
@mock.patch('monasca_log_api.app.base.log_publisher.LogPublisher')
@mock.patch('monasca_log_api.app.controller.v2.aid.service.LogCreator')
def test_should_return_v2_as_version(self, _, __):
logs_resource = logs.Logs()
self.assertEqual('v2.0', logs_resource.version)
class TestApiLogs(base.BaseApiTestCase):
@mock.patch('monasca_log_api.app.base.log_publisher.LogPublisher')
@mock.patch('monasca_log_api.app.controller.v2.aid.service.LogCreator')
def test_should_contain_deprecated_details_in_successful_response(self,
_,
__):
_init_resource(self)
self.simulate_request(
'/log/single',
method='POST',
headers={
headers.X_ROLES.name: ROLES,
headers.X_DIMENSIONS.name: 'a:1',
'Content-Type': 'application/json',
'Content-Length': '0'
}
)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
self.assertIn('deprecated', self.srmock.headers_dict)
self.assertIn('link', self.srmock.headers_dict)
@mock.patch('monasca_log_api.app.base.log_publisher.LogPublisher')
@mock.patch('monasca_log_api.app.controller.v2.aid.service.LogCreator')
def test_should_fail_not_delegate_ok_cross_tenant_id(self, _, __):
_init_resource(self)
self.simulate_request(
'/log/single',
method='POST',
query_string='tenant_id=1',
headers={
'Content-Type': 'application/json',
'Content-Length': '0'
}
)
self.assertEqual(falcon.HTTP_401, self.srmock.status)
@mock.patch('monasca_log_api.app.controller.v2.aid.service.LogCreator')
@mock.patch('monasca_log_api.app.base.log_publisher.LogPublisher')
def test_should_pass_empty_cross_tenant_id_wrong_role(self,
log_creator,
kafka_publisher):
logs_resource = _init_resource(self)
logs_resource._log_creator = log_creator
logs_resource._kafka_publisher = kafka_publisher
self.simulate_request(
'/log/single',
method='POST',
headers={
headers.X_ROLES.name: ROLES,
headers.X_DIMENSIONS.name: 'a:1',
'Content-Type': 'application/json',
'Content-Length': '0'
}
)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
self.assertEqual(1, kafka_publisher.send_message.call_count)
self.assertEqual(1, log_creator.new_log.call_count)
self.assertEqual(1, log_creator.new_log_envelope.call_count)
@mock.patch('monasca_log_api.app.controller.v2.aid.service.LogCreator')
@mock.patch('monasca_log_api.app.base.log_publisher.LogPublisher')
def test_should_pass_empty_cross_tenant_id_ok_role(self,
log_creator,
kafka_publisher):
logs_resource = _init_resource(self)
logs_resource._log_creator = log_creator
logs_resource._kafka_publisher = kafka_publisher
self.simulate_request(
'/log/single',
method='POST',
headers={
headers.X_ROLES.name: ROLES,
headers.X_DIMENSIONS.name: 'a:1',
'Content-Type': 'application/json',
'Content-Length': '0'
}
)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
self.assertEqual(1, kafka_publisher.send_message.call_count)
self.assertEqual(1, log_creator.new_log.call_count)
self.assertEqual(1, log_creator.new_log_envelope.call_count)
@mock.patch('monasca_log_api.app.controller.v2.aid.service.LogCreator')
@mock.patch('monasca_log_api.app.base.log_publisher.LogPublisher')
def test_should_pass_delegate_cross_tenant_id_ok_role(self,
log_creator,
log_publisher):
resource = _init_resource(self)
resource._log_creator = log_creator
resource._kafka_publisher = log_publisher
self.simulate_request(
'/log/single',
method='POST',
query_string='tenant_id=1',
headers={
headers.X_ROLES.name: ROLES,
headers.X_DIMENSIONS.name: 'a:1',
'Content-Type': 'application/json',
'Content-Length': '0'
}
)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
self.assertEqual(1, log_publisher.send_message.call_count)
self.assertEqual(1, log_creator.new_log.call_count)
self.assertEqual(1, log_creator.new_log_envelope.call_count)
@mock.patch('monasca_common.rest.utils')
@mock.patch('monasca_log_api.app.base.log_publisher.LogPublisher')
def test_should_fail_empty_dimensions_delegate(self, _, rest_utils):
_init_resource(self)
rest_utils.read_body.return_value = True
self.simulate_request(
'/log/single',
method='POST',
headers={
headers.X_ROLES.name: ROLES,
headers.X_DIMENSIONS.name: '',
'Content-Type': 'application/json',
'Content-Length': '0'
},
body='{"message":"test"}'
)
self.assertEqual(log_api_exceptions.HTTP_422, self.srmock.status)
@mock.patch('monasca_log_api.app.controller.v2.aid.service.LogCreator')
@mock.patch('monasca_log_api.app.base.log_publisher.LogPublisher')
def test_should_fail_for_invalid_content_type(self, _, __):
_init_resource(self)
self.simulate_request(
'/log/single',
method='POST',
headers={
headers.X_ROLES.name: ROLES,
headers.X_DIMENSIONS.name: '',
'Content-Type': 'video/3gpp',
'Content-Length': '0'
}
)
self.assertEqual(falcon.HTTP_415, self.srmock.status)
@mock.patch('monasca_log_api.app.controller.v2.aid.service.LogCreator')
@mock.patch('monasca_log_api.app.base.log_publisher.LogPublisher')
def test_should_pass_payload_size_not_exceeded(self, _, __):
_init_resource(self)
max_log_size = 1000
content_length = max_log_size - 100
self.conf_override(max_log_size=max_log_size, group='service')
self.simulate_request(
'/log/single',
method='POST',
headers={
headers.X_ROLES.name: ROLES,
headers.X_DIMENSIONS.name: '',
'Content-Type': 'application/json',
'Content-Length': str(content_length)
}
)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
@mock.patch('monasca_log_api.app.controller.v2.aid.service.LogCreator')
@mock.patch('monasca_log_api.app.base.log_publisher.LogPublisher')
def test_should_fail_payload_size_exceeded(self, _, __):
_init_resource(self)
max_log_size = 1000
content_length = max_log_size + 100
self.conf_override(max_log_size=max_log_size, group='service')
self.simulate_request(
'/log/single',
method='POST',
headers={
headers.X_ROLES.name: ROLES,
headers.X_DIMENSIONS.name: '',
'Content-Type': 'application/json',
'Content-Length': str(content_length)
}
)
self.assertEqual(falcon.HTTP_413, self.srmock.status)
@mock.patch('monasca_log_api.app.controller.v2.aid.service.LogCreator')
@mock.patch('monasca_log_api.app.base.log_publisher.LogPublisher')
def test_should_fail_payload_size_equal(self, _, __):
_init_resource(self)
max_log_size = 1000
content_length = max_log_size
self.conf_override(max_log_size=max_log_size, group='service')
self.simulate_request(
'/log/single',
method='POST',
headers={
headers.X_ROLES.name: ROLES,
headers.X_DIMENSIONS.name: '',
'Content-Type': 'application/json',
'Content-Length': str(content_length)
}
)
self.assertEqual(falcon.HTTP_413, self.srmock.status)
@mock.patch('monasca_log_api.app.controller.v2.aid.service.LogCreator')
@mock.patch('monasca_log_api.app.base.log_publisher.LogPublisher')
def test_should_fail_content_length(self, _, __):
_init_resource(self)
self.simulate_request(
'/log/single',
method='POST',
headers={
headers.X_ROLES.name: ROLES,
headers.X_DIMENSIONS.name: '',
'Content-Type': 'application/json'
}
)
self.assertEqual(falcon.HTTP_411, self.srmock.status)

View File

@@ -0,0 +1,317 @@
# Copyright 2016-2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ujson as json
import falcon
import mock
from monasca_log_api.app.base import exceptions as log_api_exceptions
from monasca_log_api.app.controller.api import headers
from monasca_log_api.app.controller.v3 import logs
from monasca_log_api.tests import base
ENDPOINT = '/logs'
TENANT_ID = 'bob'
ROLES = 'admin'
def _init_resource(test):
resource = logs.Logs()
test.api.add_route(ENDPOINT, resource)
return resource
def _generate_v3_payload(log_count=None, messages=None):
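    # Build a v3 bulk payload; explicit messages take precedence over
    # log_count, otherwise log_count unique messages are generated.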
if not log_count and messages:
log_count = len(messages)
v3_logs = [{
'message': messages[it],
'dimensions': {
'hostname': 'host_%d' % it,
'component': 'component_%d' % it,
'service': 'service_%d' % it
}
} for it in range(log_count)]
else:
v3_logs = [{
'message': base.generate_unique_message(100),
'dimensions': {
'hostname': 'host_%d' % it,
'component': 'component_%d' % it,
'service': 'service_%d' % it
}
} for it in range(log_count)]
v3_body = {
'dimensions': {
'origin': __name__
},
'logs': v3_logs
}
return v3_body, v3_logs
class TestApiLogsVersion(base.BaseApiTestCase):
@mock.patch('monasca_log_api.app.controller.v3.aid'
'.bulk_processor.BulkProcessor')
def test_should_return_v3_as_version(self, _):
logs_resource = logs.Logs()
self.assertEqual('v3.0', logs_resource.version)
@mock.patch('monasca_log_api.app.base.log_publisher.producer.KafkaProducer')
@mock.patch('monasca_log_api.monitoring.client.monascastatsd.Connection')
class TestApiLogsMonitoring(base.BaseApiTestCase):
def test_monitor_bulk_rejected(self, __, _):
res = _init_resource(self)
in_counter = res._logs_in_counter.increment = mock.Mock()
bulk_counter = res._bulks_rejected_counter.increment = mock.Mock()
rejected_counter = res._logs_rejected_counter.increment = mock.Mock()
size_gauge = res._logs_size_gauge.send = mock.Mock()
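        # Force _get_logs to fail so the whole bulk request is rejected
        # before any individual log is processed.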
res._get_logs = mock.Mock(
side_effect=log_api_exceptions.HTTPUnprocessableEntity(''))
log_count = 1
v3_body, _ = _generate_v3_payload(log_count)
payload = json.dumps(v3_body)
content_length = len(payload)
self.simulate_request(
ENDPOINT,
method='POST',
headers={
headers.X_ROLES.name: ROLES,
headers.X_TENANT_ID.name: TENANT_ID,
'Content-Type': 'application/json',
'Content-Length': str(content_length)
},
body=payload
)
self.assertEqual(1, bulk_counter.call_count)
self.assertEqual(0, in_counter.call_count)
self.assertEqual(0, rejected_counter.call_count)
self.assertEqual(0, size_gauge.call_count)
def test_monitor_not_all_logs_ok(self, __, _):
res = _init_resource(self)
in_counter = res._logs_in_counter.increment = mock.Mock()
bulk_counter = res._bulks_rejected_counter.increment = mock.Mock()
rejected_counter = res._logs_rejected_counter.increment = mock.Mock()
size_gauge = res._logs_size_gauge.send = mock.Mock()
log_count = 5
reject_logs = 1
v3_body, _ = _generate_v3_payload(log_count)
payload = json.dumps(v3_body)
content_length = len(payload)
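        # Make dimension parsing fail for the last log only, so exactly
        # reject_logs entries are rejected and the rest are accepted.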
side_effects = [{} for ___ in range(log_count - reject_logs)]
side_effects.append(log_api_exceptions.HTTPUnprocessableEntity(''))
res._processor._get_dimensions = mock.Mock(side_effect=side_effects)
self.simulate_request(
ENDPOINT,
method='POST',
headers={
headers.X_ROLES.name: ROLES,
headers.X_TENANT_ID.name: TENANT_ID,
'Content-Type': 'application/json',
'Content-Length': str(content_length)
},
body=payload
)
self.assertEqual(1, bulk_counter.call_count)
self.assertEqual(0,
bulk_counter.mock_calls[0][2]['value'])
self.assertEqual(1, in_counter.call_count)
self.assertEqual(log_count - reject_logs,
in_counter.mock_calls[0][2]['value'])
self.assertEqual(1, rejected_counter.call_count)
self.assertEqual(reject_logs,
rejected_counter.mock_calls[0][2]['value'])
self.assertEqual(1, size_gauge.call_count)
self.assertEqual(content_length,
size_gauge.mock_calls[0][2]['value'])
def test_monitor_all_logs_ok(self, __, _):
res = _init_resource(self)
in_counter = res._logs_in_counter.increment = mock.Mock()
bulk_counter = res._bulks_rejected_counter.increment = mock.Mock()
rejected_counter = res._logs_rejected_counter.increment = mock.Mock()
size_gauge = res._logs_size_gauge.send = mock.Mock()
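        # Stub out the actual Kafka send; only the metric updates are of
        # interest in this test.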
res._send_logs = mock.Mock()
log_count = 10
v3_body, _ = _generate_v3_payload(log_count)
payload = json.dumps(v3_body)
content_length = len(payload)
self.simulate_request(
ENDPOINT,
method='POST',
headers={
headers.X_ROLES.name: ROLES,
headers.X_TENANT_ID.name: TENANT_ID,
'Content-Type': 'application/json',
'Content-Length': str(content_length)
},
body=payload
)
self.assertEqual(1, bulk_counter.call_count)
self.assertEqual(0,
bulk_counter.mock_calls[0][2]['value'])
self.assertEqual(1, in_counter.call_count)
self.assertEqual(log_count,
in_counter.mock_calls[0][2]['value'])
self.assertEqual(1, rejected_counter.call_count)
self.assertEqual(0,
rejected_counter.mock_calls[0][2]['value'])
self.assertEqual(1, size_gauge.call_count)
self.assertEqual(content_length,
size_gauge.mock_calls[0][2]['value'])
class TestApiLogs(base.BaseApiTestCase):
@mock.patch('monasca_log_api.app.controller.v3.aid.bulk_processor.'
'BulkProcessor')
def test_should_pass_cross_tenant_id(self, bulk_processor):
logs_resource = _init_resource(self)
logs_resource._processor = bulk_processor
v3_body, v3_logs = _generate_v3_payload(1)
payload = json.dumps(v3_body)
content_length = len(payload)
self.simulate_request(
'/logs',
method='POST',
query_string='tenant_id=1',
headers={
headers.X_ROLES.name: ROLES,
'Content-Type': 'application/json',
'Content-Length': str(content_length)
},
body=payload
)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
logs_resource._processor.send_message.assert_called_with(
logs=v3_logs,
global_dimensions=v3_body['dimensions'],
log_tenant_id='1')
@mock.patch('monasca_log_api.app.controller.v3.aid.bulk_processor.'
'BulkProcessor')
def test_should_fail_not_delegate_ok_cross_tenant_id(self, _):
_init_resource(self)
self.simulate_request(
'/logs',
method='POST',
query_string='tenant_id=1',
headers={
headers.X_ROLES.name: ROLES,
'Content-Type': 'application/json',
'Content-Length': '0'
}
)
self.assertEqual(falcon.HTTP_400, self.srmock.status)
@mock.patch('monasca_log_api.app.controller.v3.aid.bulk_processor.'
'BulkProcessor')
def test_should_pass_empty_cross_tenant_id_wrong_role(self,
bulk_processor):
logs_resource = _init_resource(self)
logs_resource._processor = bulk_processor
v3_body, _ = _generate_v3_payload(1)
payload = json.dumps(v3_body)
content_length = len(payload)
self.simulate_request(
'/logs',
method='POST',
headers={
headers.X_ROLES.name: ROLES,
'Content-Type': 'application/json',
'Content-Length': str(content_length)
},
body=payload
)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
self.assertEqual(1, bulk_processor.send_message.call_count)
@mock.patch('monasca_log_api.app.controller.v3.aid.bulk_processor.'
'BulkProcessor')
def test_should_pass_empty_cross_tenant_id_ok_role(self,
bulk_processor):
logs_resource = _init_resource(self)
logs_resource._processor = bulk_processor
v3_body, _ = _generate_v3_payload(1)
payload = json.dumps(v3_body)
content_length = len(payload)
self.simulate_request(
'/logs',
method='POST',
headers={
headers.X_ROLES.name: ROLES,
'Content-Type': 'application/json',
'Content-Length': str(content_length)
},
body=payload
)
self.assertEqual(falcon.HTTP_204, self.srmock.status)
self.assertEqual(1, bulk_processor.send_message.call_count)
class TestUnicodeLogs(base.BaseApiTestCase):
@mock.patch('monasca_log_api.app.base.log_publisher.producer.'
'KafkaProducer')
def test_should_send_unicode_messages(self, _):
_init_resource(self)
messages = [m['input'] for m in base.UNICODE_MESSAGES]
v3_body, _ = _generate_v3_payload(messages=messages)
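        # Serialize without ASCII escaping so the unicode characters are
        # sent to the API exactly as provided.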
payload = json.dumps(v3_body, ensure_ascii=False)
content_length = len(payload)
self.simulate_request(
'/logs',
method='POST',
headers={
headers.X_ROLES.name: ROLES,
'Content-Type': 'application/json',
'Content-Length': str(content_length)
},
body=payload
)
self.assertEqual(falcon.HTTP_204, self.srmock.status)

View File

@@ -0,0 +1,52 @@
# Copyright 2016-2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from monasca_log_api.monitoring import client
from monasca_log_api.tests import base
class TestMonitoring(base.BaseTestCase):
@mock.patch('monasca_log_api.monitoring.client.monascastatsd')
def test_should_use_default_dimensions_if_none_specified(self,
monascastatsd):
client.get_client()
statsd_client = monascastatsd.Client
expected_dimensions = client._DEFAULT_DIMENSIONS
actual_dimensions = statsd_client.call_args[1]['dimensions']
self.assertEqual(1, statsd_client.call_count)
self.assertEqual(expected_dimensions, actual_dimensions)
@mock.patch('monasca_log_api.monitoring.client.monascastatsd')
def test_should_not_override_fixed_dimensions(self,
monascastatsd):
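        # Caller-supplied values for the fixed dimensions must not
        # replace the defaults reported to the statsd client.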
dims = {
'service': 'foo',
'component': 'bar'
}
client.get_client(dims)
statsd_client = monascastatsd.Client
expected_dimensions = client._DEFAULT_DIMENSIONS
actual_dimensions = statsd_client.call_args[1]['dimensions']
self.assertEqual(1, statsd_client.call_count)
self.assertEqual(expected_dimensions, actual_dimensions)

View File

@@ -0,0 +1,212 @@
# Copyright 2016-2017 FUJITSU LIMITED
# Copyright 2018 OP5 AB
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from falcon import testing
from monasca_common.policy import policy_engine as policy
from monasca_log_api.app.base import request
from monasca_log_api.policies import roles_list_to_check_str
from monasca_log_api.tests import base
from oslo_context import context
from oslo_policy import policy as os_policy
class TestPolicyFileCase(base.BaseTestCase):
def setUp(self):
super(TestPolicyFileCase, self).setUp()
self.context = context.RequestContext(user='fake',
tenant='fake',
roles=['fake'])
self.target = {'tenant_id': 'fake'}
def test_modified_policy_reloads(self):
tmp_file = \
self.create_tempfiles(files=[('policies', '{}')], ext='.yaml')[0]
base.BaseTestCase.conf_override(policy_file=tmp_file,
group='oslo_policy')
policy.reset()
policy.init()
action = 'example:test'
rule = os_policy.RuleDefault(action, '')
policy._ENFORCER.register_defaults([rule])
with open(tmp_file, 'w') as policy_file:
policy_file.write('{"example:test": ""}')
policy.authorize(self.context, action, self.target)
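        # Rewrite the policy file with a deny-all rule and force a
        # reload; the same request must now be rejected.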
with open(tmp_file, 'w') as policy_file:
policy_file.write('{"example:test": "!"}')
policy._ENFORCER.load_rules(True)
self.assertRaises(os_policy.PolicyNotAuthorized, policy.authorize,
self.context, action, self.target)
class TestPolicyCase(base.BaseTestCase):
def setUp(self):
super(TestPolicyCase, self).setUp()
rules = [
os_policy.RuleDefault("true", "@"),
os_policy.RuleDefault("example:allowed", "@"),
os_policy.RuleDefault("example:denied", "!"),
os_policy.RuleDefault("example:lowercase_monasca_user",
"role:monasca_user or role:sysadmin"),
os_policy.RuleDefault("example:uppercase_monasca_user",
"role:MONASCA_USER or role:sysadmin"),
]
policy.reset()
policy.init()
policy._ENFORCER.register_defaults(rules)
def test_authorize_nonexist_action_throws(self):
action = "example:noexist"
ctx = request.Request(
testing.create_environ(
path="/",
headers={
"X_USER_ID": "fake",
"X_PROJECT_ID": "fake",
"X_ROLES": "member"
}
)
)
self.assertRaises(os_policy.PolicyNotRegistered, policy.authorize,
ctx.context, action, {})
def test_authorize_bad_action_throws(self):
action = "example:denied"
ctx = request.Request(
testing.create_environ(
path="/",
headers={
"X_USER_ID": "fake",
"X_PROJECT_ID": "fake",
"X_ROLES": "member"
}
)
)
self.assertRaises(os_policy.PolicyNotAuthorized, policy.authorize,
ctx.context, action, {})
def test_authorize_bad_action_no_exception(self):
action = "example:denied"
ctx = request.Request(
testing.create_environ(
path="/",
headers={
"X_USER_ID": "fake",
"X_PROJECT_ID": "fake",
"X_ROLES": "member"
}
)
)
result = policy.authorize(ctx.context, action, {}, False)
self.assertFalse(result)
def test_authorize_good_action(self):
action = "example:allowed"
ctx = request.Request(
testing.create_environ(
path="/",
headers={
"X_USER_ID": "fake",
"X_PROJECT_ID": "fake",
"X_ROLES": "member"
}
)
)
result = policy.authorize(ctx.context, action, {}, False)
self.assertTrue(result)
def test_ignore_case_role_check(self):
lowercase_action = "example:lowercase_monasca_user"
uppercase_action = "example:uppercase_monasca_user"
monasca_user_context = request.Request(
testing.create_environ(
path="/",
headers={
"X_USER_ID": "monasca_user",
"X_PROJECT_ID": "fake",
"X_ROLES": "MONASCA_user"
}
)
)
self.assertTrue(policy.authorize(monasca_user_context.context,
lowercase_action,
{}))
self.assertTrue(policy.authorize(monasca_user_context.context,
uppercase_action,
{}))
class RegisteredPoliciesTestCase(base.BaseTestCase):
def __init__(self, *args, **kwds):
super(RegisteredPoliciesTestCase, self).__init__(*args, **kwds)
self.default_roles = ['monasca-user', 'admin']
def test_healthchecks_policies_roles(self):
healthcheck_policies = {
'log_api:healthcheck:head': ['any_role'],
'log_api:healthcheck:get': ['any_role']
}
self._assert_rules(healthcheck_policies)
def test_versions_policies_roles(self):
versions_policies = {
'log_api:versions:get': ['any_role']
}
self._assert_rules(versions_policies)
def test_logs_policies_roles(self):
logs_policies = {
'log_api:logs:post': self.default_roles
}
self._assert_rules(logs_policies)
def _assert_rules(self, policies_list):
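        # For every policy, verify that the number of registered rules
        # matches the expected roles and that each role is authorized.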
for policy_name in policies_list:
registered_rule = policy.get_rules()[policy_name]
if hasattr(registered_rule, 'rules'):
self.assertEqual(len(registered_rule.rules),
len(policies_list[policy_name]))
for role in policies_list[policy_name]:
ctx = self._get_request_context(role)
self.assertTrue(policy.authorize(ctx.context,
policy_name,
{})
)
@staticmethod
def _get_request_context(role):
return request.Request(
testing.create_environ(
path='/',
headers={'X_ROLES': role}
)
)
class PolicyUtilsTestCase(base.BaseTestCase):
def test_roles_list_to_check_str(self):
self.assertEqual(roles_list_to_check_str(['test_role']), 'role:test_role')
self.assertEqual(roles_list_to_check_str(['role1', 'role2', 'role3']),
'role:role1 or role:role2 or role:role3')
self.assertEqual(roles_list_to_check_str(['@']), '@')
self.assertEqual(roles_list_to_check_str(['role1', '@', 'role2']),
'role:role1 or @ or role:role2')
self.assertIsNone(roles_list_to_check_str(None))

View File

@@ -0,0 +1,100 @@
# Copyright 2016-2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from falcon import testing
from mock import mock
from monasca_log_api.app.base import request
from monasca_log_api.app.base import validation
from monasca_log_api.tests import base
class TestRequest(base.BaseTestCase):
def test_use_context_from_request(self):
req = request.Request(
testing.create_environ(
path='/',
headers={
'X_AUTH_TOKEN': '111',
'X_USER_ID': '222',
'X_PROJECT_ID': '333',
'X_ROLES': 'terminator,predator'
}
)
)
self.assertEqual('111', req.context.auth_token)
self.assertEqual('222', req.user_id)
self.assertEqual('333', req.project_id)
self.assertEqual(['terminator', 'predator'], req.roles)
def test_validate_context_type(self):
with mock.patch.object(validation,
'validate_content_type') as vc_type, \
mock.patch.object(validation,
'validate_payload_size') as vp_size, \
mock.patch.object(validation,
'validate_cross_tenant') as vc_tenant:
req = request.Request(testing.create_environ())
vc_type.side_effect = Exception()
try:
req.validate(['test'])
except Exception as ex:
self.assertEqual(1, vc_type.call_count)
self.assertEqual(0, vp_size.call_count)
self.assertEqual(0, vc_tenant.call_count)
self.assertIsInstance(ex, Exception)
def test_validate_payload_size(self):
with mock.patch.object(validation,
'validate_content_type') as vc_type, \
mock.patch.object(validation,
'validate_payload_size') as vp_size, \
mock.patch.object(validation,
'validate_cross_tenant') as vc_tenant:
req = request.Request(testing.create_environ())
vp_size.side_effect = Exception()
try:
req.validate(['test'])
except Exception as ex:
self.assertEqual(1, vc_type.call_count)
self.assertEqual(1, vp_size.call_count)
self.assertEqual(0, vc_tenant.call_count)
self.assertIsInstance(ex, Exception)
def test_validate_cross_tenant(self):
with mock.patch.object(validation,
'validate_content_type') as vc_type, \
mock.patch.object(validation,
'validate_payload_size') as vp_size, \
mock.patch.object(validation,
'validate_cross_tenant') as vc_tenant:
req = request.Request(testing.create_environ())
vc_tenant.side_effect = Exception()
try:
req.validate(['test'])
except Exception as ex:
self.assertEqual(1, vc_type.call_count)
self.assertEqual(1, vp_size.call_count)
self.assertEqual(1, vc_tenant.call_count)
self.assertIsInstance(ex, Exception)

View File

@@ -0,0 +1,226 @@
# Copyright 2015-2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from monasca_log_api.middleware import role_middleware as rm
from monasca_log_api.tests import base
from webob import response
class SideLogicTestEnsureLowerRoles(base.BaseTestCase):
def test_should_ensure_lower_roles(self):
roles = ['CMM-Admin', ' CmM-User ']
expected = ['cmm-admin', 'cmm-user']
self.assertItemsEqual(expected, rm._ensure_lower_roles(roles))
def test_should_return_empty_array_for_falsy_input_1(self):
roles = []
expected = []
self.assertItemsEqual(expected, rm._ensure_lower_roles(roles))
def test_should_return_empty_array_for_falsy_input_2(self):
roles = None
expected = []
self.assertItemsEqual(expected, rm._ensure_lower_roles(roles))
class SideLogicTestIntersect(base.BaseTestCase):
def test_should_intersect_seqs(self):
seq_1 = [1, 2, 3]
seq_2 = [2]
expected = [2]
self.assertItemsEqual(expected, rm._intersect(seq_1, seq_2))
self.assertItemsEqual(expected, rm._intersect(seq_2, seq_1))
def test_should_intersect_empty(self):
seq_1 = []
seq_2 = []
expected = []
self.assertItemsEqual(expected, rm._intersect(seq_1, seq_2))
self.assertItemsEqual(expected, rm._intersect(seq_2, seq_1))
def test_should_not_intersect_without_common_elements(self):
seq_1 = [1, 2, 3]
seq_2 = [4, 5, 6]
expected = []
self.assertItemsEqual(expected, rm._intersect(seq_1, seq_2))
self.assertItemsEqual(expected, rm._intersect(seq_2, seq_1))
class RolesMiddlewareSideLogicTest(base.BaseTestCase):
def test_should_apply_middleware_for_valid_path(self):
paths = ['/', '/v2.0/', '/v2.0/log/']
instance = rm.RoleMiddleware(None)
instance._path = paths
for p in paths:
req = mock.Mock()
req.method = 'GET'
req.path = p
self.assertTrue(instance._can_apply_middleware(req))
def test_should_apply_middleware_for_invalid_path(self):
paths = ['/v2.0/', '/v2.0/log/']
instance = rm.RoleMiddleware(None)
instance._path = paths
for p in paths:
pp = 'test/%s' % p
req = mock.Mock()
req.method = 'GET'
req.path = pp
self.assertFalse(instance._can_apply_middleware(req))
def test_should_reject_OPTIONS_request(self):
instance = rm.RoleMiddleware(None)
req = mock.Mock()
req.method = 'OPTIONS'
req.path = '/'
self.assertFalse(instance._can_apply_middleware(req))
def test_should_return_true_if_authenticated(self):
instance = rm.RoleMiddleware(None)
req = mock.Mock()
req.headers = {rm._X_IDENTITY_STATUS: rm._CONFIRMED_STATUS}
self.assertTrue(instance._is_authenticated(req))
def test_should_return_false_if_not_authenticated(self):
instance = rm.RoleMiddleware(None)
req = mock.Mock()
req.headers = {rm._X_IDENTITY_STATUS: 'Some_Other_Status'}
self.assertFalse(instance._is_authenticated(req))
def test_should_return_false_if_identity_status_not_found(self):
instance = rm.RoleMiddleware(None)
req = mock.Mock()
req.headers = {}
self.assertFalse(instance._is_authenticated(req))
def test_should_return_true_if_is_agent(self):
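        # A request is treated as an agent request when its roles
        # intersect the configured agent roles.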
roles = 'cmm-admin,cmm-user'
roles_array = roles.split(',')
default_roles = [roles_array[0]]
admin_roles = [roles_array[1]]
instance = rm.RoleMiddleware(None)
instance._default_roles = default_roles
instance._agent_roles = admin_roles
req = mock.Mock()
req.headers = {rm._X_ROLES: roles}
is_agent = instance._is_agent(req)
self.assertTrue(is_agent)
class RolesMiddlewareLogicTest(base.BaseTestCase):
def test_not_process_further_if_cannot_apply_path(self):
roles = 'cmm-admin,cmm-user'
roles_array = roles.split(',')
default_roles = [roles_array[0]]
admin_roles = [roles_array[1]]
instance = rm.RoleMiddleware(None)
instance._default_roles = default_roles
instance._agent_roles = admin_roles
instance._path = ['/test']
        # spy on the internal checks so their calls can be asserted
instance._is_authenticated = mock.Mock()
instance._is_agent = mock.Mock()
req = mock.Mock()
req.headers = {rm._X_ROLES: roles}
req.path = '/different/test'
instance.process_request(req=req)
self.assertFalse(instance._is_authenticated.called)
self.assertFalse(instance._is_agent.called)
def test_not_process_further_if_cannot_apply_method(self):
roles = 'cmm-admin,cmm-user'
roles_array = roles.split(',')
default_roles = [roles_array[0]]
admin_roles = [roles_array[1]]
instance = rm.RoleMiddleware(None)
instance._default_roles = default_roles
instance._agent_roles = admin_roles
instance._path = ['/test']
        # spy on the internal checks so their calls can be asserted
instance._is_authenticated = mock.Mock()
instance._is_agent = mock.Mock()
req = mock.Mock()
req.headers = {rm._X_ROLES: roles}
req.path = '/test'
req.method = 'OPTIONS'
instance.process_request(req=req)
self.assertFalse(instance._is_authenticated.called)
self.assertFalse(instance._is_agent.called)
def test_should_produce_json_response_if_not_authenticated(
self):
instance = rm.RoleMiddleware(None)
is_agent = True
is_authenticated = False
instance._can_apply_middleware = mock.Mock(return_value=True)
instance._is_agent = mock.Mock(return_value=is_agent)
instance._is_authenticated = mock.Mock(return_value=is_authenticated)
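        # An unauthenticated request short-circuits the middleware and
        # a 401 JSON response is returned directly.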
req = mock.Mock()
req.environ = {}
req.headers = {
'X-Tenant-Id': '11111111'
}
result = instance.process_request(req=req)
self.assertIsNotNone(result)
self.assertIsInstance(result, response.Response)
status = result.status_code
json_body = result.json_body
message = json_body.get('message')
self.assertIn('Failed to authenticate request for', message)
self.assertEqual(401, status)

View File

@@ -0,0 +1,483 @@
# Copyright 2015 kornicameister@gmail.com
# Copyright 2016-2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import unittest
import mock
from falcon import errors
from falcon import testing
from monasca_log_api.app.base import exceptions
from monasca_log_api.app.base import validation
from monasca_log_api.app.controller.v2.aid import service as aid_service
from monasca_log_api.tests import base
class IsDelegate(base.BaseTestCase):
def __init__(self, *args, **kwargs):
super(IsDelegate, self).__init__(*args, **kwargs)
self._roles = ['admin']
def test_is_delegate_ok_role(self):
self.assertTrue(validation.validate_is_delegate(self._roles))
def test_is_delegate_ok_role_in_roles(self):
self._roles.extend(['a_role', 'b_role'])
self.assertTrue(validation.validate_is_delegate(self._roles))
def test_is_delegate_not_ok_role(self):
roles = ['a_role', 'b_role']
self.assertFalse(validation.validate_is_delegate(roles))
class ParseDimensions(base.BaseTestCase):
def test_should_fail_for_empty_dimensions(self):
self.assertRaises(exceptions.HTTPUnprocessableEntity,
aid_service.parse_dimensions,
'')
self.assertRaises(exceptions.HTTPUnprocessableEntity,
aid_service.parse_dimensions,
None)
def test_should_fail_for_empty_dim_in_dimensions(self):
err = self.assertRaises(exceptions.HTTPUnprocessableEntity,
aid_service.parse_dimensions,
',')
self.assertEqual(err.description, 'Dimension cannot be empty')
def test_should_fail_for_invalid_dim_in_dimensions(self):
invalid_dim = 'a'
err = self.assertRaises(exceptions.HTTPUnprocessableEntity,
aid_service.parse_dimensions,
invalid_dim)
self.assertEqual(err.description, '%s is not a valid dimension'
% invalid_dim)
def test_should_pass_for_valid_dimensions(self):
dimensions = 'a:1,b:2'
expected = {
'a': '1',
'b': '2'
}
self.assertDictEqual(expected,
aid_service.parse_dimensions(dimensions))
class ParseApplicationType(base.BaseTestCase):
def test_should_return_none_for_none(self):
self.assertIsNone(aid_service.parse_application_type(None))
def test_should_return_none_for_empty(self):
self.assertIsNone(aid_service.parse_application_type(''))
def test_should_return_none_for_whitespace_filled(self):
self.assertIsNone(aid_service.parse_application_type(' '))
def test_should_return_value_for_ok_value(self):
app_type = 'monasca'
self.assertEqual(app_type,
aid_service.parse_application_type(app_type))
def test_should_return_value_for_ok_value_with_spaces(self):
app_type = ' monasca '
expected = 'monasca'
self.assertEqual(expected,
aid_service.parse_application_type(app_type))
class ApplicationTypeValidations(base.BaseTestCase):
def test_should_pass_for_empty_app_type(self):
validation.validate_application_type()
validation.validate_application_type('')
def test_should_fail_for_invalid_length(self):
r_app_type = testing.rand_string(300, 600)
err = self.assertRaises(exceptions.HTTPUnprocessableEntity,
validation.validate_application_type,
r_app_type)
length = validation.APPLICATION_TYPE_CONSTRAINTS['MAX_LENGTH']
msg = ('Application type {type} must be '
'{length} characters or less'.format(type=r_app_type,
length=length))
self.assertEqual(err.description, msg)
def test_should_fail_for_invalid_content(self):
r_app_type = '%#$@!'
err = self.assertRaises(exceptions.HTTPUnprocessableEntity,
validation.validate_application_type,
r_app_type)
msg = ('Application type %s may only contain: "a-z A-Z 0-9 _ - ."' %
r_app_type)
self.assertEqual(err.description, msg)
def test_should_pass_for_ok_app_type(self):
r_app_type = 'monasca'
validation.validate_application_type(r_app_type)
class DimensionsValidations(base.BaseTestCase):
@unittest.expectedFailure
def test_should_fail_for_none_dimensions(self):
validation.validate_dimensions(None)
@unittest.expectedFailure
def test_should_fail_pass_for_non_iterable_dimensions_str(self):
validation.validate_dimensions('')
@unittest.expectedFailure
def test_should_fail_pass_for_non_iterable_dimensions_number(self):
validation.validate_dimensions(1)
def test_should_pass_for_empty_dimensions_array(self):
validation.validate_dimensions({})
def test_should_fail_too_empty_name(self):
dimensions = {'': 1}
err = self.assertRaises(exceptions.HTTPUnprocessableEntity,
validation.validate_dimensions,
dimensions)
msg = 'Dimension name cannot be empty'
self.assertEqual(err.description, msg)
def test_should_fail_too_long_name(self):
name = testing.rand_string(256, 260)
dimensions = {name: 1}
err = self.assertRaises(exceptions.HTTPUnprocessableEntity,
validation.validate_dimensions,
dimensions)
msg = 'Dimension name %s must be 255 characters or less' % name
self.assertEqual(err.description, msg)
def test_should_fail_underscore_at_begin(self):
name = '_aDim'
dimensions = {name: 1}
err = self.assertRaises(exceptions.HTTPUnprocessableEntity,
validation.validate_dimensions,
dimensions)
msg = 'Dimension name %s cannot start with underscore (_)' % name
self.assertEqual(err.description, msg)
def test_should_fail_invalid_chars(self):
name = '<>'
dimensions = {name: 1}
err = self.assertRaises(exceptions.HTTPUnprocessableEntity,
validation.validate_dimensions,
dimensions)
invalid_chars = '> < = { } ( ) \' " , ; &'
msg = 'Dimension name %s may not contain: %s' % (name, invalid_chars)
self.assertEqual(err.description, msg)
def test_should_fail_ok_name_empty_value(self):
name = 'monasca'
dimensions = {name: ''}
err = self.assertRaises(exceptions.HTTPUnprocessableEntity,
validation.validate_dimensions,
dimensions)
msg = 'Dimension value cannot be empty'
self.assertEqual(err.description, msg)
def test_should_fail_ok_name_too_long_value(self):
name = 'monasca'
value = testing.rand_string(256, 300)
dimensions = {name: value}
err = self.assertRaises(exceptions.HTTPUnprocessableEntity,
validation.validate_dimensions,
dimensions)
msg = 'Dimension value %s must be 255 characters or less' % value
self.assertEqual(err.description, msg)
def test_should_pass_ok_name_ok_value_empty_service(self):
name = 'monasca'
value = '1'
dimensions = {name: value}
validation.validate_dimensions(dimensions)
def test_should_pass_ok_name_ok_value_service_SERVICE_DIMENSIONS_as_name(
self):
name = 'some_name'
value = '1'
dimensions = {name: value}
validation.validate_dimensions(dimensions)
class ContentTypeValidations(base.BaseTestCase):
def test_should_pass_text_plain(self):
content_type = 'text/plain'
allowed_types = ['text/plain']
req = mock.Mock()
req.content_type = content_type
validation.validate_content_type(req, allowed_types)
def test_should_pass_application_json(self):
content_type = 'application/json'
allowed_types = ['application/json']
req = mock.Mock()
req.content_type = content_type
validation.validate_content_type(req, allowed_types)
def test_should_fail_invalid_content_type(self):
content_type = 'no/such/type'
allowed_types = ['application/json']
req = mock.Mock()
req.content_type = content_type
self.assertRaises(
errors.HTTPUnsupportedMediaType,
validation.validate_content_type,
req,
allowed_types
)
def test_should_fail_missing_header(self):
content_type = None
allowed_types = ['application/json']
req = mock.Mock()
req.content_type = content_type
self.assertRaises(
errors.HTTPMissingHeader,
validation.validate_content_type,
req,
allowed_types
)
class PayloadSizeValidations(base.BaseTestCase):
def test_should_fail_missing_header(self):
content_length = None
req = mock.Mock()
req.content_length = content_length
self.assertRaises(
errors.HTTPLengthRequired,
validation.validate_payload_size,
req
)
def test_should_pass_limit_not_exceeded(self):
content_length = 120
max_log_size = 240
self.conf_override(max_log_size=max_log_size,
group='service')
req = mock.Mock()
req.content_length = content_length
validation.validate_payload_size(req)
def test_should_fail_limit_exceeded(self):
content_length = 120
max_log_size = 60
self.conf_override(max_log_size=max_log_size,
group='service')
req = mock.Mock()
req.content_length = content_length
self.assertRaises(
errors.HTTPRequestEntityTooLarge,
validation.validate_payload_size,
req
)
def test_should_fail_limit_equal(self):
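        # A payload exactly at the configured limit is rejected as well;
        # only sizes strictly below max_log_size are accepted.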
content_length = 120
max_log_size = 120
self.conf_override(max_log_size=max_log_size,
group='service')
req = mock.Mock()
req.content_length = content_length
self.assertRaises(
errors.HTTPRequestEntityTooLarge,
validation.validate_payload_size,
req
)
class LogMessageValidations(base.BaseTestCase):
def test_should_pass_message_in_log_property(self):
log_object = {
'message': 'some messages',
'application_type': 'monasca-log-api',
'dimensions': {
'hostname': 'devstack'
}
}
validation.validate_log_message(log_object)
@unittest.expectedFailure
def test_should_fail_pass_for_non_message_in_log_property(self):
log_object = {
'massage': 'some messages',
'application_type': 'monasca-log-api',
'dimensions': {
'hostname': 'devstack'
}
}
validation.validate_log_message(log_object)
def test_should_fail_with_empty_message(self):
self.assertRaises(exceptions.HTTPUnprocessableEntity,
validation.validate_log_message, {})
class LogsCreatorNewLog(base.BaseTestCase):
def setUp(self):
super(LogsCreatorNewLog, self).setUp()
self.instance = aid_service.LogCreator()
@mock.patch('io.IOBase')
def test_should_create_log_from_json(self, payload):
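        # The mocked payload stands in for the request body stream;
        # read() returns the raw JSON document to be parsed.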
msg = u'Hello World'
path = u'/var/log/messages'
json_msg = u'{"path":"%s","message":"%s"}' % (path, msg)
app_type = 'monasca'
dimensions = 'cpu_time:30'
payload.read.return_value = json_msg
expected_log = {
'message': msg,
'dimensions': {
'component': app_type,
'cpu_time': '30'
},
'path': path
}
self.assertEqual(expected_log, self.instance.new_log(
application_type=app_type,
dimensions=dimensions,
payload=payload
))
@mock.patch('io.IOBase')
def test_should_create_log_from_text(self, payload):
msg = u'Hello World'
app_type = 'monasca'
dimension_name = 'cpu_time'
dimension_value = 30
dimensions = '%s:%s' % (dimension_name, str(dimension_value))
payload.read.return_value = msg
expected_log = {
'message': msg,
'dimensions': {
'component': app_type,
dimension_name: str(dimension_value)
}
}
self.assertEqual(expected_log, self.instance.new_log(
application_type=app_type,
dimensions=dimensions,
payload=payload,
content_type='text/plain'
))
class LogCreatorNewEnvelope(base.BaseTestCase):
def setUp(self):
super(LogCreatorNewEnvelope, self).setUp()
self.instance = aid_service.LogCreator()
def test_should_create_envelope(self):
msg = u'Hello World'
path = u'/var/log/messages'
app_type = 'monasca'
dimension_name = 'cpu_time'
dimension_value = 30
expected_log = {
'message': msg,
'application_type': app_type,
'dimensions': {
dimension_name: str(dimension_value)
},
'path': path
}
tenant_id = 'a_tenant'
none = None
meta = {'tenantId': tenant_id, 'region': none}
timestamp = (datetime.datetime.utcnow() -
datetime.datetime(1970, 1, 1)).total_seconds()
expected_envelope = {
'log': expected_log,
'creation_time': timestamp,
'meta': meta
}
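        # creation_time is generated inside new_log_envelope, so only the
        # 'log' and 'meta' parts of the envelope are compared below.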
with mock.patch.object(self.instance, '_create_meta_info',
return_value=meta):
actual_envelope = self.instance.new_log_envelope(expected_log,
tenant_id)
self.assertEqual(expected_envelope.get('log'),
actual_envelope.get('log'))
self.assertEqual(expected_envelope.get('meta'),
actual_envelope.get('meta'))
self.assertDictEqual(
expected_envelope.get('log').get('dimensions'),
actual_envelope.get('log').get('dimensions'))
@unittest.expectedFailure
def test_should_not_create_log_none(self):
log_object = None
tenant_id = 'a_tenant'
self.instance.new_log_envelope(log_object, tenant_id)
@unittest.expectedFailure
def test_should_not_create_log_empty(self):
log_object = {}
tenant_id = 'a_tenant'
self.instance.new_log_envelope(log_object, tenant_id)
@unittest.expectedFailure
def test_should_not_create_tenant_none(self):
log_object = {
'message': ''
}
tenant_id = None
self.instance.new_log_envelope(log_object, tenant_id)
@unittest.expectedFailure
def test_should_not_create_tenant_empty(self):
log_object = {
'message': ''
}
tenant_id = ''
self.instance.new_log_envelope(log_object, tenant_id)

View File

@@ -0,0 +1,119 @@
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ujson as json
import mock
from monasca_log_api.app.controller.api import headers
from monasca_log_api.app.controller.v2 import logs as v2_logs
from monasca_log_api.app.controller.v3 import logs as v3_logs
from monasca_log_api.tests import base
class TestApiSameV2V3Output(base.BaseApiTestCase):
# noinspection PyProtectedMember
@mock.patch('monasca_log_api.app.base.log_publisher.'
'producer.KafkaProducer')
def test_send_identical_messages(self, _):
        # Mock only the log publisher, i.e. the last component that
        # actually sends data to Kafka. The test verifies that the
        # publisher is called with identical arguments for both API
        # versions.
v2 = v2_logs.Logs()
v3 = v3_logs.Logs()
publish_mock = mock.Mock()
v2._kafka_publisher._kafka_publisher.publish = publish_mock
v3._processor._kafka_publisher.publish = publish_mock
component = 'monasca-log-api'
service = 'laas'
hostname = 'kornik'
tenant_id = 'ironMan'
roles = 'admin'
v2_dimensions = 'hostname:%s,service:%s' % (hostname, service)
v3_dimensions = {
'hostname': hostname,
'component': component,
'service': service
}
v2_body = {
'message': 'test'
}
v3_body = {
'logs': [
{
'message': 'test',
'dimensions': v3_dimensions
}
]
}
self.api.add_route('/v2.0', v2)
self.api.add_route('/v3.0', v3)
self.simulate_request(
'/v2.0',
method='POST',
headers={
headers.X_ROLES.name: roles,
headers.X_DIMENSIONS.name: v2_dimensions,
headers.X_APPLICATION_TYPE.name: component,
headers.X_TENANT_ID.name: tenant_id,
'Content-Type': 'application/json',
'Content-Length': '100'
},
body=json.dumps(v2_body)
)
self.simulate_request(
'/v3.0',
method='POST',
headers={
headers.X_ROLES.name: roles,
headers.X_TENANT_ID.name: tenant_id,
'Content-Type': 'application/json',
'Content-Length': '100'
},
body=json.dumps(v3_body)
)
self.assertEqual(2, publish_mock.call_count)
        # in v2 send_message is called with a single envelope
v2_send_msg_arg = publish_mock.mock_calls[0][1][1]
        # in v3 it is always called with a list of envelopes
v3_send_msg_arg = publish_mock.mock_calls[1][1][1]
self.maxDiff = None
        # at this point both arguments should be identical
self.assertEqual(type(v2_send_msg_arg), type(v3_send_msg_arg))
self.assertIsInstance(v3_send_msg_arg, list)
self.assertEqual(len(v2_send_msg_arg), len(v3_send_msg_arg))
self.assertEqual(1, len(v2_send_msg_arg))
v2_msg_as_dict = json.loads(v2_send_msg_arg[0])
v3_msg_as_dict = json.loads(v3_send_msg_arg[0])
self.assertDictEqual(v2_msg_as_dict, v3_msg_as_dict)

View File

@@ -0,0 +1,22 @@
# Copyright 2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from monasca_log_api import version
from monasca_log_api.tests import base
class TestAppVersion(base.BaseTestCase):
def test_should_report_version(self):
self.assertIsNotNone(version.version_str)

View File

@@ -0,0 +1,120 @@
# Copyright 2016 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ujson as json
import falcon
from monasca_log_api.app.controller import versions
from monasca_log_api.tests import base
def _get_versioned_url(version_id):
return '/version/%s' % version_id
class TestApiVersions(base.BaseApiTestCase):
def before(self):
self.versions = versions.Versions()
self.api.add_route("/version/", self.versions)
self.api.add_route("/version/{version_id}", self.versions)
def test_should_fail_for_unsupported_version(self):
unsupported_version = 'v5.0'
uri = _get_versioned_url(unsupported_version)
self.simulate_request(
uri,
method='GET',
headers={
'Content-Type': 'application/json'
}
)
self.assertEqual(falcon.HTTP_400, self.srmock.status)
def test_should_return_all_supported_versions(self):
def _check_elements():
self.assertIn('elements', response)
elements = response.get('elements')
self.assertIsInstance(elements, list)
for el in elements:
                # verify each element exposes exactly the expected keys
self.assertIn('id', el)
self.assertItemsEqual([
u'id',
u'links',
u'status',
u'updated'
], el.keys())
ver = el.get('id')
self.assertIn(ver, expected_versions)
def _check_global_links():
self.assertIn('links', response)
links = response.get('links')
self.assertIsInstance(links, list)
for link in links:
self.assertIn('rel', link)
key = link.get('rel')
self.assertIn(key, expected_links_keys)
expected_versions = 'v2.0', 'v3.0'
expected_links_keys = 'self', 'version', 'healthcheck'
res = self.simulate_request(
'/version',
method='GET',
headers={
'Content-Type': 'application/json'
},
decode='utf-8'
)
self.assertEqual(falcon.HTTP_200, self.srmock.status)
response = json.loads(res)
_check_elements()
_check_global_links()
def test_should_return_expected_version_id(self):
expected_versions = 'v2.0', 'v3.0'
for expected_version in expected_versions:
uri = _get_versioned_url(expected_version)
res = self.simulate_request(
uri,
method='GET',
headers={
'Content-Type': 'application/json'
},
decode='utf-8'
)
self.assertEqual(falcon.HTTP_200, self.srmock.status)
response = json.loads(res)
self.assertIn('elements', response)
self.assertIn('links', response)
elements = response.get('elements')
self.assertIsInstance(elements, list)
self.assertEqual(1, len(elements))
el = elements[0]
ver = el.get('id')
self.assertEqual(expected_version, ver)

View File

@@ -0,0 +1,18 @@
# Copyright 2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
version_info = pbr.version.VersionInfo('monasca-log-api')
version_str = version_info.version_string()