add max/sum project volume and fix tests
Adds API calls to get the max or sum volume for a specific project.
Moves the logic for deciding when to skip a test that needs the Mongo
map-reduce feature into a new test db module. Adds a TestConnection
class to avoid recreating the underlying Database object when using
MIM; this is necessary because spidermonkey can only create a limited
number of instances per process.

Change-Id: Idefc01978e41ec823533b7dbb164319a916b155c
Signed-off-by: Steven Berler <steven.berler@dreamhost.com>
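The new endpoints take the same query parameters as the existing per-resource volume calls. A minimal client sketch of how they might be exercised (the host, port, URL prefix, project ID, and meter name are illustrative assumptions, not taken from this change):

import requests

params = {
    'start_timestamp': '2012-09-25T10:30:00',
    'end_timestamp': '2012-09-25T11:30:00',
    'search_offset': 10,  # minutes of slack around the time window
}
# assumed deployment: the v1 blueprint served on localhost:9000 under /v1
url = 'http://localhost:9000/v1/projects/project-a/meters/volume.size/volume/max'
resp = requests.get(url, params=params)
print(resp.json())  # e.g. {"volume": 20.0}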
@@ -60,6 +60,10 @@
#
# [ ] /projects/<project>/meters/<meter>/volume -- total or max volume for
#       selected meter
# [x] /projects/<project>/meters/<meter>/volume/max -- max volume for
#       selected meter
# [x] /projects/<project>/meters/<meter>/volume/sum -- total volume for
#       selected meter
# [ ] /resources/<resource>/meters/<meter>/volume -- total or max volume for
#       selected meter
# [x] /resources/<resource>/meters/<meter>/volume/max -- max volume for
@@ -476,3 +480,67 @@ def compute_resource_volume_sum(resource, meter):
    value = results[0].get('value')  # there should only be one!

    return flask.jsonify(volume=value)


@blueprint.route('/projects/<project>/meters/<meter>/volume/max')
def compute_project_volume_max(project, meter):
    """Return the max volume for a meter.

    :param project: The ID of the project.
    :param meter: The name of the meter.
    :param start_timestamp: ISO-formatted string of the
        earliest time to include in the calculation.
    :param end_timestamp: ISO-formatted string of the
        latest time to include in the calculation.
    :param search_offset: Number of minutes before and
        after start and end timestamps to query.
    """
    q_ts = _get_query_timestamps(flask.request.args)

    f = storage.EventFilter(meter=meter,
                            project=project,
                            start=q_ts['query_start'],
                            end=q_ts['query_end'],
                            )
    # FIXME(sberler): Currently get_volume_max is really always grouping
    # by resource_id. We should add a new function in the storage driver
    # that does not do this grouping (and potentially rename the existing
    # one to get_volume_max_by_resource())
    results = list(flask.request.storage_conn.get_volume_max(f))
    value = None
    if results:
        value = max(result.get('value') for result in results)

    return flask.jsonify(volume=value)


@blueprint.route('/projects/<project>/meters/<meter>/volume/sum')
def compute_project_volume_sum(project, meter):
    """Return the total volume for a meter.

    :param project: The ID of the project.
    :param meter: The name of the meter.
    :param start_timestamp: ISO-formatted string of the
        earliest time to include in the calculation.
    :param end_timestamp: ISO-formatted string of the
        latest time to include in the calculation.
    :param search_offset: Number of minutes before and
        after start and end timestamps to query.
    """
    q_ts = _get_query_timestamps(flask.request.args)

    f = storage.EventFilter(meter=meter,
                            project=project,
                            start=q_ts['query_start'],
                            end=q_ts['query_end'],
                            )
    # FIXME(sberler): Currently get_volume_sum is really always grouping
    # by resource_id. We should add a new function in the storage driver
    # that does not do this grouping (and potentially rename the existing
    # one to get_volume_sum_by_resource())
    results = list(flask.request.storage_conn.get_volume_sum(f))
    value = None
    if results:
        value = sum(result.get('value') for result in results)

    return flask.jsonify(volume=value)
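As the FIXME notes, the storage driver's get_volume_max (and get_volume_sum) currently return one row per resource_id, so the project-level figure is obtained by reducing over those rows. A small sketch of that reduction with made-up rows:

results = [
    {'resource_id': 'resource-1', 'value': 5},
    {'resource_id': 'resource-2', 'value': 7},
]

project_max = max(r.get('value') for r in results) if results else None  # 7
project_sum = sum(r.get('value') for r in results) if results else None  # 12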
@@ -19,63 +19,27 @@
"""

import json
import logging
import os
import unittest
import urllib

import flask
from ming import mim
import mock

from ceilometer.tests import base as test_base
from ceilometer.tests import db as db_test_base
from ceilometer.api import v1
from ceilometer.storage import impl_mongodb

LOG = logging.getLogger(__name__)


class Connection(impl_mongodb.Connection):

    def _get_connection(self, conf):
        # Use a real MongoDB server if we can connect, but fall back
        # to a Mongo-in-memory connection if we cannot.
        self.force_mongo = bool(int(os.environ.get('CEILOMETER_TEST_LIVE', 0)))
        if self.force_mongo:
            try:
                return super(Connection, self)._get_connection(conf)
            except:
                LOG.debug('Unable to connect to mongod')
                raise
        else:
            LOG.debug('Unable to connect to mongod, falling back to MIM')
            return mim.Connection()


class TestBase(test_base.TestCase):

    DBNAME = 'testdb'
class TestBase(db_test_base.TestBase):

    def setUp(self):
        super(TestBase, self).setUp()
        self.app = flask.Flask('test')
        self.app.register_blueprint(v1.blueprint)
        self.test_app = self.app.test_client()
        self.conf = mock.Mock()
        self.conf.metering_storage_engine = 'mongodb'
        self.conf.database_connection = 'mongodb://localhost/%s' % self.DBNAME
        self.conn = Connection(self.conf)
        self.conn.conn.drop_database(self.DBNAME)
        self.conn.conn[self.DBNAME]

        @self.app.before_request
        def attach_storage_connection():
            flask.request.storage_conn = self.conn
        return

    def tearDown(self):
        self.conn.conn.drop_database(self.DBNAME)

    def get(self, path, **kwds):
        if kwds:
            query = path + '?' + urllib.urlencode(kwds)
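With the storage setup now inherited from db_test_base.TestBase, an API test only subclasses this TestBase and goes through the get() helper and the Flask test client. A hedged sketch (the test class, meter name, and the assumption that get() returns the parsed JSON body are illustrative, not part of this change):

class TestMaxProjectVolume(TestBase):

    def test_no_samples(self):
        # no events stored, so the view is expected to return volume=None
        data = self.get('/projects/project-a/meters/volume.size/volume/max')
        self.assertEqual(data['volume'], None)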
ceilometer/tests/db.py (new file, 98 lines)
@@ -0,0 +1,98 @@
# -*- encoding: utf-8 -*-
#
# Copyright © 2012 New Dream Network, LLC (DreamHost)
#
# Author: Doug Hellmann <doug.hellmann@dreamhost.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base classes for API tests.
|
||||
"""
|
||||
|
||||
import logging
import os

from ming import mim

import mock

from nose.plugins import skip

from ceilometer.storage import impl_mongodb
from ceilometer.tests import base as test_base

LOG = logging.getLogger(__name__)


class TestBase(test_base.TestCase):

    DBNAME = 'testdb'

    def setUp(self):
        super(TestBase, self).setUp()
        self.conf = mock.Mock()
        self.conf.metering_storage_engine = 'mongodb'
        self.conf.database_connection = 'mongodb://localhost/%s' % self.DBNAME
        self.conn = TestConnection(self.conf)
        self.conn.drop_database(self.DBNAME)
        self.conn.conn[self.DBNAME]
        return

    def tearDown(self):
        self.conn.drop_database(self.DBNAME)


class TestConnection(impl_mongodb.Connection):

    _mim_instance = None
    FORCE_MONGO = bool(int(os.environ.get('CEILOMETER_TEST_LIVE', 0)))

    def drop_database(self, database):
        if TestConnection._mim_instance is not None:
            # Don't want to use drop_database() because we
            # may end up running out of spidermonkey instances.
            # http://davisp.lighthouseapp.com/projects/26898/tickets/22
            self.conn[database].clear()
        else:
            self.conn.drop_database(database)

    def _get_connection(self, conf):
        # Use a real MongoDB server when CEILOMETER_TEST_LIVE is set;
        # otherwise use a shared Mongo-in-memory (MIM) connection.
        if self.FORCE_MONGO:
            try:
                return super(TestConnection, self)._get_connection(conf)
            except:
                LOG.debug('Unable to connect to mongodb')
                raise
        else:
            LOG.debug('Using MIM for test connection')

            # MIM will die if we have too many connections, so use a
            # Singleton
            if TestConnection._mim_instance is None:
                LOG.debug('Creating a new MIM Connection object')
                TestConnection._mim_instance = mim.Connection()
            return TestConnection._mim_instance


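Every TestBase.setUp() above builds a fresh TestConnection, so without the singleton each test would create a new mim.Connection and eventually exhaust spidermonkey instances. A quick illustration under the same mocked configuration, assuming CEILOMETER_TEST_LIVE is unset:

conf = mock.Mock()
conf.metering_storage_engine = 'mongodb'
conf.database_connection = 'mongodb://localhost/testdb'

c1 = TestConnection(conf)
c2 = TestConnection(conf)
assert c1.conn is c2.conn  # both reuse TestConnection._mim_instance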
def require_map_reduce(conn):
    """Raises SkipTest if the connection is using mim.
    """
    # NOTE(dhellmann): mim requires spidermonkey to implement the
    # map-reduce functions, so if we cannot import spidermonkey and
    # the connection is using mim, skip these tests.
    try:
        import spidermonkey
    except:
        if isinstance(conn.conn, mim.Connection):
            raise skip.SkipTest('requires spidermonkey')
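Per the commit message, tests that depend on Mongo map/reduce now call this helper instead of carrying their own skip logic. A hedged sketch of such a test (the class name and test body are illustrative only):

from ceilometer.tests import db as db_test_base


class TestSumProjectVolume(db_test_base.TestBase):

    def setUp(self):
        super(TestSumProjectVolume, self).setUp()
        # skipped when running against MIM without spidermonkey available
        db_test_base.require_map_reduce(self.conn)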