Full PEP 8 compliance (part 2)

Final step to PEP 8 compliance

Change-Id: Ibe44f55f9415dc8cc380521debee609a20a67416
This commit is contained in:
Gordon Chung 2013-11-19 14:14:24 -05:00
parent e116bffdf2
commit b5dfb0d97e
37 changed files with 541 additions and 549 deletions

View File

@ -23,7 +23,7 @@ from ceilometerclient import client as ceiloclient
from oslo.config import cfg
import six
from ceilometer.openstack.common.gettextutils import _
from ceilometer.openstack.common.gettextutils import _ # noqa
from ceilometer.openstack.common import log
LOG = log.getLogger(__name__)

View File

@ -18,7 +18,7 @@
from ceilometer.alarm import evaluator
from ceilometer.openstack.common.gettextutils import _
from ceilometer.openstack.common.gettextutils import _ # noqa
from ceilometer.openstack.common import log
LOG = log.getLogger(__name__)

View File

@ -21,7 +21,7 @@ import datetime
import operator
from ceilometer.alarm import evaluator
from ceilometer.openstack.common.gettextutils import _
from ceilometer.openstack.common.gettextutils import _ # noqa
from ceilometer.openstack.common import log
from ceilometer.openstack.common import timeutils

View File

@ -21,7 +21,7 @@ import random
import uuid
from ceilometer.alarm import rpc as rpc_alarm
from ceilometer.openstack.common.gettextutils import _
from ceilometer.openstack.common.gettextutils import _ # noqa
from ceilometer.openstack.common import log
from ceilometer.openstack.common import timeutils

View File

@ -19,10 +19,10 @@
from oslo.config import cfg
from ceilometer.openstack.common import context
from ceilometer.openstack.common.gettextutils import _
from ceilometer.openstack.common.gettextutils import _ # noqa
from ceilometer.openstack.common import log
from ceilometer.openstack.common.rpc import proxy as rpc_proxy
from ceilometer.storage.models import Alarm
from ceilometer.storage import models
OPTS = [
cfg.StrOpt('notifier_rpc_topic',
@ -46,7 +46,7 @@ class RPCAlarmNotifier(rpc_proxy.RpcProxy):
topic=cfg.CONF.alarm.notifier_rpc_topic)
def notify(self, alarm, previous, reason):
actions = getattr(alarm, Alarm.ALARM_ACTIONS_MAP[alarm.state])
actions = getattr(alarm, models.Alarm.ALARM_ACTIONS_MAP[alarm.state])
if not actions:
LOG.debug(_('alarm %(alarm_id)s has no action configured '
'for state transition from %(previous)s to '

View File

@ -27,14 +27,14 @@ from stevedore import extension
from ceilometer.alarm.partition import coordination
from ceilometer.alarm import rpc as rpc_alarm
from ceilometer.openstack.common.gettextutils import _
from ceilometer.openstack.common.gettextutils import _ # noqa
from ceilometer.openstack.common import importutils
from ceilometer.openstack.common import log
from ceilometer.openstack.common import network_utils
from ceilometer.openstack.common.rpc import dispatcher as rpc_dispatcher
from ceilometer.openstack.common.rpc import service as rpc_service
from ceilometer.openstack.common import service as os_service
from ceilometer.service import prepare_service
from ceilometer import service
OPTS = [
@ -139,9 +139,9 @@ class SingletonAlarmService(AlarmService, os_service.Service):
def alarm_evaluator():
prepare_service()
service = importutils.import_object(cfg.CONF.alarm.evaluation_service)
os_service.launch(service).wait()
service.prepare_service()
eval_service = importutils.import_object(cfg.CONF.alarm.evaluation_service)
os_service.launch(eval_service).wait()
cfg.CONF.import_opt('host', 'ceilometer.service')
@ -280,6 +280,6 @@ class AlarmNotifierService(rpc_service.Service):
def alarm_notifier():
prepare_service()
service.prepare_service()
os_service.launch(AlarmNotifierService(
cfg.CONF.host, 'ceilometer.alarm')).wait()

View File

@ -40,7 +40,7 @@ import wsmeext.pecan as wsme_pecan
from ceilometer.api import acl
from ceilometer.openstack.common import context
from ceilometer.openstack.common.gettextutils import _
from ceilometer.openstack.common.gettextutils import _ # noqa
from ceilometer.openstack.common import log
from ceilometer.openstack.common.notifier import api as notify
from ceilometer.openstack.common import strutils

View File

@ -23,12 +23,9 @@ Based on pecan.middleware.errordocument
"""
import json
from lxml import etree
import webob
from xml import etree as et
try:
from xml.etree.ElementTree import ParseError
except ImportError:
from xml.parsers.expat import ExpatError as ParseError
from ceilometer.api import hooks
from ceilometer.openstack.common import gettextutils
@ -101,19 +98,16 @@ class ParsableErrorMiddleware(object):
== 'application/xml'):
try:
# simple check xml is valid
fault = et.ElementTree.fromstring('\n'.join(app_iter))
fault = etree.fromstring('\n'.join(app_iter))
# Add the translated error to the xml data
if error is not None:
for fault_string in fault.findall('faultstring'):
fault_string.text = (
gettextutils.get_localized_message(
error, user_locale))
body = [et.ElementTree.tostring(
et.ElementTree.fromstring(
'<error_message>'
+ et.ElementTree.tostring(fault)
+ '</error_message>'))]
except ParseError as err:
body = ['<error_message>' + etree.tostring(fault)
+ '</error_message>']
except etree.XMLSyntaxError as err:
LOG.error('Error parsing HTTP response: %s' % err)
body = ['<error_message>%s' % state['status_code']
+ '</error_message>']

View File

@ -24,14 +24,14 @@ from stevedore import extension
from stevedore import named
from ceilometer.openstack.common import context
from ceilometer.openstack.common.gettextutils import _
from ceilometer.openstack.common.gettextutils import _ # noqa
from ceilometer.openstack.common import log
from ceilometer.openstack.common.rpc import dispatcher as rpc_dispatcher
from ceilometer.openstack.common.rpc import service as rpc_service
from ceilometer.openstack.common import service as os_service
from ceilometer.openstack.common import timeutils
from ceilometer import pipeline
from ceilometer.service import prepare_service
from ceilometer import service
from ceilometer.storage import models
from ceilometer import transformer
@ -117,7 +117,7 @@ class UDPCollectorService(CollectorBase, os_service.Service):
def udp_collector():
prepare_service()
service.prepare_service()
os_service.launch(UDPCollectorService()).wait()
@ -294,6 +294,6 @@ class CollectorService(CollectorBase, rpc_service.Service):
def collector():
prepare_service()
service.prepare_service()
os_service.launch(CollectorService(cfg.CONF.host,
'ceilometer.collector')).wait()

View File

@ -36,7 +36,7 @@ from nova import utils
from stevedore import extension
from ceilometer.compute.virt import inspector
from ceilometer.openstack.common.gettextutils import _
from ceilometer.openstack.common.gettextutils import _ # noqa
# This module runs inside the nova compute

View File

@ -28,7 +28,7 @@ if sys.platform == 'win32':
from oslo.config import cfg
from ceilometer.compute.virt import inspector
from ceilometer.openstack.common.gettextutils import _
from ceilometer.openstack.common.gettextutils import _ # noqa
from ceilometer.openstack.common import log as logging
CONF = cfg.CONF

View File

@ -21,7 +21,7 @@ from oslo.config import cfg
import requests
from ceilometer.central import plugin
from ceilometer.openstack.common.gettextutils import _
from ceilometer.openstack.common.gettextutils import _ # noqa
from ceilometer.openstack.common import log
from ceilometer import sample

View File

@ -19,13 +19,13 @@
"""
from __future__ import absolute_import
from urlparse import urljoin
import urlparse
from keystoneclient import exceptions
from oslo.config import cfg
from swiftclient import client as swift
from ceilometer.openstack.common.gettextutils import _
from ceilometer.openstack.common.gettextutils import _ # noqa
from ceilometer.openstack.common import log
from ceilometer.openstack.common import timeutils
from ceilometer import plugin
@ -74,7 +74,8 @@ class _Base(plugin.PollsterBase):
def _neaten_url(endpoint, tenant_id):
"""Transform the registered url to standard and valid format.
"""
return urljoin(endpoint, '/v1/' + cfg.CONF.reseller_prefix + tenant_id)
return urlparse.urljoin(endpoint,
'/v1/' + cfg.CONF.reseller_prefix + tenant_id)
class ObjectsPollster(_Base):

View File

@ -23,7 +23,7 @@ import socket
import msgpack
from oslo.config import cfg
from ceilometer.openstack.common.gettextutils import _
from ceilometer.openstack.common.gettextutils import _ # noqa
from ceilometer.openstack.common import log
from ceilometer.openstack.common import network_utils
from ceilometer import publisher

View File

@ -32,7 +32,7 @@ import bson.code
import bson.objectid
import pymongo
from ceilometer.openstack.common.gettextutils import _
from ceilometer.openstack.common.gettextutils import _ # noqa
from ceilometer.openstack.common import log
from ceilometer.openstack.common import timeutils
from ceilometer import storage

View File

@ -29,7 +29,7 @@ import urlparse
import happybase
from ceilometer.openstack.common.gettextutils import _
from ceilometer.openstack.common.gettextutils import _ # noqa
from ceilometer.openstack.common import log
from ceilometer.openstack.common import network_utils
from ceilometer.openstack.common import timeutils

View File

@ -32,7 +32,7 @@ import pymongo
from oslo.config import cfg
from ceilometer.openstack.common.gettextutils import _
from ceilometer.openstack.common.gettextutils import _ # noqa
from ceilometer.openstack.common import log
from ceilometer import storage
from ceilometer.storage import base

View File

@ -30,27 +30,13 @@ from sqlalchemy.orm import aliased
from ceilometer.openstack.common.db import exception as dbexc
import ceilometer.openstack.common.db.sqlalchemy.session as sqlalchemy_session
from ceilometer.openstack.common.gettextutils import _
from ceilometer.openstack.common.gettextutils import _ # noqa
from ceilometer.openstack.common import log
from ceilometer.openstack.common import timeutils
from ceilometer.storage import base
from ceilometer.storage import models as api_models
from ceilometer.storage.sqlalchemy import migration
from ceilometer.storage.sqlalchemy.models import Alarm
from ceilometer.storage.sqlalchemy.models import AlarmChange
from ceilometer.storage.sqlalchemy.models import Base
from ceilometer.storage.sqlalchemy.models import Event
from ceilometer.storage.sqlalchemy.models import MetaBool
from ceilometer.storage.sqlalchemy.models import MetaFloat
from ceilometer.storage.sqlalchemy.models import MetaInt
from ceilometer.storage.sqlalchemy.models import MetaText
from ceilometer.storage.sqlalchemy.models import Meter
from ceilometer.storage.sqlalchemy.models import Project
from ceilometer.storage.sqlalchemy.models import Resource
from ceilometer.storage.sqlalchemy.models import Source
from ceilometer.storage.sqlalchemy.models import Trait
from ceilometer.storage.sqlalchemy.models import UniqueName
from ceilometer.storage.sqlalchemy.models import User
from ceilometer.storage.sqlalchemy import models
from ceilometer import utils
LOG = log.getLogger(__name__)
@ -106,13 +92,13 @@ class SQLAlchemyStorage(base.StorageEngine):
return Connection(conf)
META_TYPE_MAP = {bool: MetaBool,
str: MetaText,
unicode: MetaText,
types.NoneType: MetaText,
int: MetaInt,
long: MetaInt,
float: MetaFloat}
META_TYPE_MAP = {bool: models.MetaBool,
str: models.MetaText,
unicode: models.MetaText,
types.NoneType: models.MetaText,
int: models.MetaInt,
long: models.MetaInt,
float: models.MetaFloat}
def apply_metaquery_filter(session, query, metaquery):
@ -148,23 +134,23 @@ def make_query_from_filter(session, query, sample_filter, require_meter=True):
"""
if sample_filter.meter:
query = query.filter(Meter.counter_name == sample_filter.meter)
query = query.filter(models.Meter.counter_name == sample_filter.meter)
elif require_meter:
raise RuntimeError(_('Missing required meter specifier'))
if sample_filter.source:
query = query.filter(Meter.sources.any(id=sample_filter.source))
query = query.filter(models.Meter.sources.any(id=sample_filter.source))
if sample_filter.start:
ts_start = sample_filter.start
if sample_filter.start_timestamp_op == 'gt':
query = query.filter(Meter.timestamp > ts_start)
query = query.filter(models.Meter.timestamp > ts_start)
else:
query = query.filter(Meter.timestamp >= ts_start)
query = query.filter(models.Meter.timestamp >= ts_start)
if sample_filter.end:
ts_end = sample_filter.end
if sample_filter.end_timestamp_op == 'le':
query = query.filter(Meter.timestamp <= ts_end)
query = query.filter(models.Meter.timestamp <= ts_end)
else:
query = query.filter(Meter.timestamp < ts_end)
query = query.filter(models.Meter.timestamp < ts_end)
if sample_filter.user:
query = query.filter_by(user_id=sample_filter.user)
if sample_filter.project:
@ -195,7 +181,7 @@ class Connection(base.Connection):
def clear(self):
session = sqlalchemy_session.get_session()
engine = session.get_bind()
for table in reversed(Base.metadata.sorted_tables):
for table in reversed(models.Base.metadata.sorted_tables):
engine.execute(table.delete())
@staticmethod
@ -244,20 +230,22 @@ class Connection(base.Connection):
with session.begin():
# Record the updated resource metadata
rmetadata = data['resource_metadata']
source = cls._create_or_update(session, Source, data['source'])
user = cls._create_or_update(session, User, data['user_id'],
source = cls._create_or_update(session, models.Source,
data['source'])
user = cls._create_or_update(session, models.User, data['user_id'],
source)
project = cls._create_or_update(session, Project,
project = cls._create_or_update(session, models.Project,
data['project_id'], source)
resource = cls._create_or_update(session, Resource,
resource = cls._create_or_update(session, models.Resource,
data['resource_id'], source,
user=user, project=project,
resource_metadata=rmetadata)
# Record the raw data for the meter.
meter = Meter(counter_type=data['counter_type'],
counter_unit=data['counter_unit'],
counter_name=data['counter_name'], resource=resource)
meter = models.Meter(counter_type=data['counter_type'],
counter_unit=data['counter_unit'],
counter_name=data['counter_name'],
resource=resource)
session.add(meter)
if not filter(lambda x: x.id == source.id, meter.sources):
meter.sources.append(source)
@ -292,24 +280,26 @@ class Connection(base.Connection):
"""
session = sqlalchemy_session.get_session()
query = session.query(Meter.id)
query = session.query(models.Meter.id)
end = timeutils.utcnow() - datetime.timedelta(seconds=ttl)
query = query.filter(Meter.timestamp < end)
query = query.filter(models.Meter.timestamp < end)
query.delete()
query = session.query(User.id).filter(~User.id.in_(
session.query(Meter.user_id).group_by(Meter.user_id)
query = session.query(models.User.id).filter(~models.User.id.in_(
session.query(models.Meter.user_id).group_by(models.Meter.user_id)
))
query.delete(synchronize_session='fetch')
query = session.query(Project.id).filter(~Project.id.in_(
session.query(Meter.project_id).group_by(Meter.project_id)
))
query = session.query(models.Project.id)\
.filter(~models.Project.id.in_(
session.query(models.Meter.project_id).group_by(
models.Meter.project_id)))
query.delete(synchronize_session='fetch')
query = session.query(Resource.id).filter(~Resource.id.in_(
session.query(Meter.resource_id).group_by(Meter.resource_id)
))
query = session.query(models.Resource.id)\
.filter(~models.Resource.id.in_(
session.query(models.Meter.resource_id).group_by(
models.Meter.resource_id)))
query.delete(synchronize_session='fetch')
@staticmethod
@ -319,9 +309,9 @@ class Connection(base.Connection):
:param source: Optional source filter.
"""
session = sqlalchemy_session.get_session()
query = session.query(User.id)
query = session.query(models.User.id)
if source is not None:
query = query.filter(User.sources.any(id=source))
query = query.filter(models.User.sources.any(id=source))
return (x[0] for x in query.all())
@staticmethod
@ -331,9 +321,9 @@ class Connection(base.Connection):
:param source: Optional source filter.
"""
session = sqlalchemy_session.get_session()
query = session.query(Project.id)
query = session.query(models.Project.id)
if source:
query = query.filter(Project.sources.any(id=source))
query = query.filter(models.Project.sources.any(id=source))
return (x[0] for x in query.all())
@staticmethod
@ -367,21 +357,21 @@ class Connection(base.Connection):
session = sqlalchemy_session.get_session()
ts_subquery = session.query(
Meter.resource_id,
func.max(Meter.timestamp).label("max_ts"),
func.min(Meter.timestamp).label("min_ts")
).group_by(Meter.resource_id)
models.Meter.resource_id,
func.max(models.Meter.timestamp).label("max_ts"),
func.min(models.Meter.timestamp).label("min_ts")
).group_by(models.Meter.resource_id)
# Here are the basic 'eq' operation filters for the sample data.
for column, value in [(Meter.resource_id, resource),
(Meter.user_id, user),
(Meter.project_id, project)]:
for column, value in [(models.Meter.resource_id, resource),
(models.Meter.user_id, user),
(models.Meter.project_id, project)]:
if value:
ts_subquery = ts_subquery.filter(column == value)
if source:
ts_subquery = ts_subquery.filter(
Meter.sources.any(id=source))
models.Meter.sources.any(id=source))
if metaquery:
ts_subquery = apply_metaquery_filter(session,
@ -393,31 +383,27 @@ class Connection(base.Connection):
if start_timestamp:
if start_timestamp_op == 'gt':
ts_subquery = ts_subquery.filter(
Meter.timestamp > start_timestamp
)
models.Meter.timestamp > start_timestamp)
else:
ts_subquery = ts_subquery.filter(
Meter.timestamp >= start_timestamp
)
models.Meter.timestamp >= start_timestamp)
if end_timestamp:
if end_timestamp_op == 'le':
ts_subquery = ts_subquery.filter(
Meter.timestamp <= end_timestamp
)
models.Meter.timestamp <= end_timestamp)
else:
ts_subquery = ts_subquery.filter(
Meter.timestamp < end_timestamp
)
models.Meter.timestamp < end_timestamp)
ts_subquery = ts_subquery.subquery()
# Now we need to get the max Meter.id out of the leftover results, to
# break any ties.
agg_subquery = session.query(
func.max(Meter.id).label("max_id"),
func.max(models.Meter.id).label("max_id"),
ts_subquery
).filter(
Meter.resource_id == ts_subquery.c.resource_id,
Meter.timestamp == ts_subquery.c.max_ts
models.Meter.resource_id == ts_subquery.c.resource_id,
models.Meter.timestamp == ts_subquery.c.max_ts
).group_by(
ts_subquery.c.resource_id,
ts_subquery.c.max_ts,
@ -425,11 +411,11 @@ class Connection(base.Connection):
).subquery()
query = session.query(
Meter,
models.Meter,
agg_subquery.c.min_ts,
agg_subquery.c.max_ts
).filter(
Meter.id == agg_subquery.c.max_id
models.Meter.id == agg_subquery.c.max_id
)
for meter, first_ts, last_ts in query.all():
@ -476,8 +462,9 @@ class Connection(base.Connection):
# max() is used to choice a meter record, so the latest record
# is selected for each (resource_id, counter_name).
#
subquery_meter = session.query(func.max(Meter.id).label('id')).\
group_by(Meter.resource_id, Meter.counter_name).subquery()
subquery_meter = session.query(func.max(models.Meter.id).label('id'))\
.group_by(models.Meter.resource_id,
models.Meter.counter_name).subquery()
# The SQL of query_meter is essentially:
#
@ -486,26 +473,26 @@ class Connection(base.Connection):
# GROUP BY meter.resource_id, meter.counter_name) AS anon_2
# ON meter.id = anon_2.id
#
query_meter = session.query(Meter).\
join(subquery_meter, Meter.id == subquery_meter.c.id)
query_meter = session.query(models.Meter).\
join(subquery_meter, models.Meter.id == subquery_meter.c.id)
if metaquery:
query_meter = apply_metaquery_filter(session,
query_meter,
metaquery)
alias_meter = aliased(Meter, query_meter.subquery())
query = session.query(Resource, alias_meter).join(
alias_meter, Resource.id == alias_meter.resource_id)
alias_meter = aliased(models.Meter, query_meter.subquery())
query = session.query(models.Resource, alias_meter).join(
alias_meter, models.Resource.id == alias_meter.resource_id)
if user is not None:
query = query.filter(Resource.user_id == user)
query = query.filter(models.Resource.user_id == user)
if source is not None:
query = query.filter(Resource.sources.any(id=source))
query = query.filter(models.Resource.sources.any(id=source))
if resource:
query = query.filter(Resource.id == resource)
query = query.filter(models.Resource.id == resource)
if project is not None:
query = query.filter(Resource.project_id == project)
query = query.filter(models.Resource.project_id == project)
for resource, meter in query.all():
yield api_models.Meter(
@ -528,12 +515,13 @@ class Connection(base.Connection):
return
session = sqlalchemy_session.get_session()
query = session.query(Meter)
query = session.query(models.Meter)
query = make_query_from_filter(session, query, sample_filter,
require_meter=False)
if limit:
query = query.limit(limit)
samples = query.from_self().order_by(desc(Meter.timestamp)).all()
samples = query.from_self()\
.order_by(desc(models.Meter.timestamp)).all()
for s in samples:
# Remove the id generated by the database when
@ -560,20 +548,20 @@ class Connection(base.Connection):
@staticmethod
def _make_stats_query(sample_filter, groupby):
select = [
Meter.counter_unit.label('unit'),
func.min(Meter.timestamp).label('tsmin'),
func.max(Meter.timestamp).label('tsmax'),
func.avg(Meter.counter_volume).label('avg'),
func.sum(Meter.counter_volume).label('sum'),
func.min(Meter.counter_volume).label('min'),
func.max(Meter.counter_volume).label('max'),
func.count(Meter.counter_volume).label('count'),
models.Meter.counter_unit.label('unit'),
func.min(models.Meter.timestamp).label('tsmin'),
func.max(models.Meter.timestamp).label('tsmax'),
func.avg(models.Meter.counter_volume).label('avg'),
func.sum(models.Meter.counter_volume).label('sum'),
func.min(models.Meter.counter_volume).label('min'),
func.max(models.Meter.counter_volume).label('max'),
func.count(models.Meter.counter_volume).label('count'),
]
session = sqlalchemy_session.get_session()
if groupby:
group_attributes = [getattr(Meter, g) for g in groupby]
group_attributes = [getattr(models.Meter, g) for g in groupby]
select.extend(group_attributes)
query = session.query(*select)
@ -640,8 +628,8 @@ class Connection(base.Connection):
sample_filter.start or res.tsmin,
sample_filter.end or res.tsmax,
period):
q = query.filter(Meter.timestamp >= period_start)
q = q.filter(Meter.timestamp < period_end)
q = query.filter(models.Meter.timestamp >= period_start)
q = q.filter(models.Meter.timestamp < period_end)
for r in q.all():
if r.count:
yield self._stats_result_to_model(
@ -686,17 +674,17 @@ class Connection(base.Connection):
raise NotImplementedError(_('Pagination not implemented'))
session = sqlalchemy_session.get_session()
query = session.query(Alarm)
query = session.query(models.Alarm)
if name is not None:
query = query.filter(Alarm.name == name)
query = query.filter(models.Alarm.name == name)
if enabled is not None:
query = query.filter(Alarm.enabled == enabled)
query = query.filter(models.Alarm.enabled == enabled)
if user is not None:
query = query.filter(Alarm.user_id == user)
query = query.filter(models.Alarm.user_id == user)
if project is not None:
query = query.filter(Alarm.project_id == project)
query = query.filter(models.Alarm.project_id == project)
if alarm_id is not None:
query = query.filter(Alarm.id == alarm_id)
query = query.filter(models.Alarm.id == alarm_id)
return (self._row_to_alarm_model(x) for x in query.all())
@ -707,9 +695,9 @@ class Connection(base.Connection):
"""
session = sqlalchemy_session.get_session()
with session.begin():
session.merge(User(id=alarm.user_id))
session.merge(Project(id=alarm.project_id))
alarm_row = Alarm(id=alarm.alarm_id)
session.merge(models.User(id=alarm.user_id))
session.merge(models.Project(id=alarm.project_id))
alarm_row = models.Alarm(id=alarm.alarm_id)
alarm_row.update(alarm.as_dict())
session.add(alarm_row)
session.flush()
@ -723,7 +711,7 @@ class Connection(base.Connection):
"""
session = sqlalchemy_session.get_session()
with session.begin():
alarm_row = session.merge(Alarm(id=alarm.alarm_id))
alarm_row = session.merge(models.Alarm(id=alarm.alarm_id))
alarm_row.update(alarm.as_dict())
session.flush()
@ -737,7 +725,8 @@ class Connection(base.Connection):
"""
session = sqlalchemy_session.get_session()
with session.begin():
session.query(Alarm).filter(Alarm.id == alarm_id).delete()
session.query(models.Alarm).filter(
models.Alarm.id == alarm_id).delete()
session.flush()
@staticmethod
@ -779,29 +768,34 @@ class Connection(base.Connection):
:param end_timestamp_op: Optional timestamp end range operation
"""
session = sqlalchemy_session.get_session()
query = session.query(AlarmChange)
query = query.filter(AlarmChange.alarm_id == alarm_id)
query = session.query(models.AlarmChange)
query = query.filter(models.AlarmChange.alarm_id == alarm_id)
if on_behalf_of is not None:
query = query.filter(AlarmChange.on_behalf_of == on_behalf_of)
query = query.filter(
models.AlarmChange.on_behalf_of == on_behalf_of)
if user is not None:
query = query.filter(AlarmChange.user_id == user)
query = query.filter(models.AlarmChange.user_id == user)
if project is not None:
query = query.filter(AlarmChange.project_id == project)
query = query.filter(models.AlarmChange.project_id == project)
if type is not None:
query = query.filter(AlarmChange.type == type)
query = query.filter(models.AlarmChange.type == type)
if start_timestamp:
if start_timestamp_op == 'gt':
query = query.filter(AlarmChange.timestamp > start_timestamp)
query = query.filter(
models.AlarmChange.timestamp > start_timestamp)
else:
query = query.filter(AlarmChange.timestamp >= start_timestamp)
query = query.filter(
models.AlarmChange.timestamp >= start_timestamp)
if end_timestamp:
if end_timestamp_op == 'le':
query = query.filter(AlarmChange.timestamp <= end_timestamp)
query = query.filter(
models.AlarmChange.timestamp <= end_timestamp)
else:
query = query.filter(AlarmChange.timestamp < end_timestamp)
query = query.filter(
models.AlarmChange.timestamp < end_timestamp)
query = query.order_by(desc(AlarmChange.timestamp))
query = query.order_by(desc(models.AlarmChange.timestamp))
return (self._row_to_alarm_change_model(x) for x in query.all())
def record_alarm_change(self, alarm_change):
@ -809,17 +803,19 @@ class Connection(base.Connection):
"""
session = sqlalchemy_session.get_session()
with session.begin():
session.merge(User(id=alarm_change['user_id']))
session.merge(Project(id=alarm_change['project_id']))
session.merge(Project(id=alarm_change['on_behalf_of']))
alarm_change_row = AlarmChange(event_id=alarm_change['event_id'])
session.merge(models.User(id=alarm_change['user_id']))
session.merge(models.Project(id=alarm_change['project_id']))
session.merge(models.Project(id=alarm_change['on_behalf_of']))
alarm_change_row = models.AlarmChange(
event_id=alarm_change['event_id'])
alarm_change_row.update(alarm_change)
session.add(alarm_change_row)
session.flush()
@staticmethod
def _get_unique(session, key):
return session.query(UniqueName).filter(UniqueName.key == key).first()
return session.query(models.UniqueName)\
.filter(models.UniqueName.key == key).first()
def _get_or_create_unique_name(self, key, session=None):
"""Find the UniqueName entry for a given key, creating
@ -832,7 +828,7 @@ class Connection(base.Connection):
with session.begin(subtransactions=True):
unique = self._get_unique(session, key)
if not unique:
unique = UniqueName(key=key)
unique = models.UniqueName(key=key)
session.add(unique)
session.flush()
return unique
@ -844,14 +840,14 @@ class Connection(base.Connection):
"""
name = self._get_or_create_unique_name(trait_model.name,
session=session)
value_map = Trait._value_map
value_map = models.Trait._value_map
values = {'t_string': None, 't_float': None,
't_int': None, 't_datetime': None}
value = trait_model.value
if trait_model.dtype == api_models.Trait.DATETIME_TYPE:
value = utils.dt_to_decimal(value)
values[value_map[trait_model.dtype]] = value
return Trait(name, event, trait_model.dtype, **values)
return models.Trait(name, event, trait_model.dtype, **values)
def _record_event(self, session, event_model):
"""Store a single Event, including related Traits.
@ -861,7 +857,7 @@ class Connection(base.Connection):
session=session)
generated = utils.dt_to_decimal(event_model.generated)
event = Event(event_model.message_id, unique, generated)
event = models.Event(event_model.message_id, unique, generated)
session.add(event)
new_traits = []
@ -913,14 +909,15 @@ class Connection(base.Connection):
end = utils.dt_to_decimal(event_filter.end)
session = sqlalchemy_session.get_session()
with session.begin():
event_query_filters = [Event.generated >= start,
Event.generated <= end]
sub_query = session.query(Event.id)\
.join(Trait, Trait.event_id == Event.id)
event_query_filters = [models.Event.generated >= start,
models.Event.generated <= end]
sub_query = session.query(models.Event.id)\
.join(models.Trait, models.Trait.event_id == models.Event.id)
if event_filter.event_name:
event_name = self._get_unique(session, event_filter.event_name)
event_query_filters.append(Event.unique_name == event_name)
event_query_filters.append(
models.Event.unique_name == event_name)
sub_query = sub_query.filter(*event_query_filters)
@ -929,19 +926,24 @@ class Connection(base.Connection):
for key, value in event_filter.traits.iteritems():
if key == 'key':
key = self._get_unique(session, value)
sub_query = sub_query.filter(Trait.name == key)
sub_query = sub_query.filter(models.Trait.name == key)
elif key == 't_string':
sub_query = sub_query.filter(Trait.t_string == value)
sub_query = sub_query.filter(
models.Trait.t_string == value)
elif key == 't_int':
sub_query = sub_query.filter(Trait.t_int == value)
sub_query = sub_query.filter(
models.Trait.t_int == value)
elif key == 't_datetime':
dt = utils.dt_to_decimal(value)
sub_query = sub_query.filter(Trait.t_datetime == dt)
sub_query = sub_query.filter(
models.Trait.t_datetime == dt)
elif key == 't_float':
sub_query = sub_query.filter(Trait.t_datetime == value)
sub_query = sub_query.filter(
models.Trait.t_datetime == value)
else:
# Pre-populate event_models_dict to cover Events without traits
events = session.query(Event).filter(*event_query_filters)
events = session.query(models.Event)\
.filter(*event_query_filters)
for db_event in events.all():
generated = utils.decimal_to_dt(db_event.generated)
api_event = api_models.Event(db_event.message_id,
@ -951,8 +953,8 @@ class Connection(base.Connection):
sub_query = sub_query.subquery()
all_data = session.query(Trait)\
.join(sub_query, Trait.event_id == sub_query.c.id)
all_data = session.query(models.Trait)\
.join(sub_query, models.Trait.event_id == sub_query.c.id)
# Now convert the sqlalchemy objects back into Models ...
for trait in all_data.all():

View File

@ -11,8 +11,9 @@
# under the License.
from __future__ import with_statement
from logging import config as log_config
from alembic import context
from logging.config import fileConfig
import ceilometer.openstack.common.db.sqlalchemy.session as sqlalchemy_session
from ceilometer.storage.sqlalchemy import models
@ -23,7 +24,7 @@ config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
log_config.fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support

View File

@ -15,16 +15,16 @@
from sqlalchemy import Index
from ceilometer.storage.sqlalchemy.models import Meter
from ceilometer.storage.sqlalchemy import models
def upgrade(migrate_engine):
index = Index('idx_meter_rid_cname', Meter.resource_id,
Meter.counter_name)
index = Index('idx_meter_rid_cname', models.Meter.resource_id,
models.Meter.counter_name)
index.create(bind=migrate_engine)
def downgrade(migrate_engine):
index = Index('idx_meter_rid_cname', Meter.resource_id,
Meter.counter_name)
index = Index('idx_meter_rid_cname', models.Meter.resource_id,
models.Meter.counter_name)
index.drop(bind=migrate_engine)

View File

@ -18,7 +18,7 @@
import sqlalchemy as sa
from ceilometer.storage.sqlalchemy.models import PreciseTimestamp
from ceilometer.storage.sqlalchemy import models
_col = 'timestamp'
@ -63,7 +63,8 @@ def upgrade(migrate_engine):
if migrate_engine.name == 'mysql':
meta = sa.MetaData(bind=migrate_engine)
meter = sa.Table('meter', meta, autoload=True)
_convert_data_type(meter, _col, sa.DateTime(), PreciseTimestamp(),
_convert_data_type(meter, _col, sa.DateTime(),
models.PreciseTimestamp(),
pk_attr='id', index=True)
@ -71,5 +72,5 @@ def downgrade(migrate_engine):
if migrate_engine.name == 'mysql':
meta = sa.MetaData(bind=migrate_engine)
meter = sa.Table('meter', meta, autoload=True)
_convert_data_type(meter, _col, PreciseTimestamp(), sa.DateTime(),
pk_attr='id', index=True)
_convert_data_type(meter, _col, models.PreciseTimestamp(),
sa.DateTime(), pk_attr='id', index=True)

View File

@ -17,7 +17,7 @@
# under the License.
"""Tests for ceilometer.alarm.service.PartitionedAlarmService.
"""
from contextlib import nested
import contextlib
import mock
from stevedore import extension
@ -61,8 +61,9 @@ class TestPartitionedAlarmService(test.BaseTestCase):
group='alarm')
get_client = 'ceilometerclient.client.get_client'
create_conn = 'ceilometer.openstack.common.rpc.create_connection'
with nested(mock.patch(get_client, return_value=self.api_client),
mock.patch(create_conn)):
with contextlib.nested(mock.patch(get_client,
return_value=self.api_client),
mock.patch(create_conn)):
self.partitioned.start()
pc = self.partitioned.partition_coordinator
expected = [

View File

@ -18,7 +18,7 @@
import uuid
from ceilometerclient.v2.alarms import Alarm as AlarmClient
from ceilometerclient.v2 import alarms
import mock
from ceilometer.alarm import rpc as rpc_alarm
@ -27,7 +27,7 @@ from ceilometer.openstack.common.fixture import moxstubout
from ceilometer.openstack.common import rpc
from ceilometer.openstack.common import test
from ceilometer.openstack.common import timeutils
from ceilometer.storage.models import Alarm as AlarmModel
from ceilometer.storage import models
class TestRPCAlarmNotifier(test.BaseTestCase):
@ -42,7 +42,7 @@ class TestRPCAlarmNotifier(test.BaseTestCase):
self.stubs.Set(rpc, 'cast', self.faux_cast)
self.notifier = rpc_alarm.RPCAlarmNotifier()
self.alarms = [
AlarmClient(None, info={
alarms.Alarm(None, info={
'name': 'instance_running_hot',
'meter_name': 'cpu_util',
'comparison_operator': 'gt',
@ -58,7 +58,7 @@ class TestRPCAlarmNotifier(test.BaseTestCase):
'matching_metadata':{'resource_id':
'my_instance'}
}),
AlarmClient(None, info={
alarms.Alarm(None, info={
'name': 'group_running_idle',
'meter_name': 'cpu_util',
'comparison_operator': 'le',
@ -82,7 +82,7 @@ class TestRPCAlarmNotifier(test.BaseTestCase):
self.notifier.notify(a, previous[i], "what? %d" % i)
self.assertEqual(len(self.notified), 2)
for i, a in enumerate(self.alarms):
actions = getattr(a, AlarmModel.ALARM_ACTIONS_MAP[a.state])
actions = getattr(a, models.Alarm.ALARM_ACTIONS_MAP[a.state])
self.assertEqual(self.notified[i][0],
self.CONF.alarm.notifier_rpc_topic)
self.assertEqual(self.notified[i][1]["args"]["data"]["alarm_id"],
@ -102,7 +102,7 @@ class TestRPCAlarmNotifier(test.BaseTestCase):
self.assertTrue(isinstance(reason, basestring))
def test_notify_no_actions(self):
alarm = AlarmClient(None, info={
alarm = alarms.Alarm(None, info={
'name': 'instance_running_hot',
'meter_name': 'cpu_util',
'comparison_operator': 'gt',

View File

@ -28,7 +28,7 @@ import uuid
import mock
import testscenarios
from ceilometer.storage.models import Alarm
from ceilometer.storage import models
from ceilometer.tests.api.v2 import FunctionalTest
from ceilometer.tests import db as tests_db
@ -53,104 +53,99 @@ class TestAlarms(FunctionalTest,
super(TestAlarms, self).setUp()
self.auth_headers = {'X-User-Id': str(uuid.uuid4()),
'X-Project-Id': str(uuid.uuid4())}
for alarm in [Alarm(name='name1',
type='threshold',
enabled=True,
alarm_id='a',
description='a',
state='insufficient data',
state_timestamp=None,
timestamp=None,
ok_actions=[],
insufficient_data_actions=[],
alarm_actions=[],
repeat_actions=True,
user_id=self.auth_headers['X-User-Id'],
project_id=self.auth_headers['X-Project-Id'],
rule=dict(comparison_operator='gt',
threshold=2.0,
statistic='avg',
evaluation_periods=60,
period=1,
meter_name='meter.test',
query=[
{'field': 'project_id',
for alarm in [
models.Alarm(name='name1',
type='threshold',
enabled=True,
alarm_id='a',
description='a',
state='insufficient data',
state_timestamp=None,
timestamp=None,
ok_actions=[],
insufficient_data_actions=[],
alarm_actions=[],
repeat_actions=True,
user_id=self.auth_headers['X-User-Id'],
project_id=self.auth_headers['X-Project-Id'],
rule=dict(comparison_operator='gt',
threshold=2.0,
statistic='avg',
evaluation_periods=60,
period=1,
meter_name='meter.test',
query=[{'field': 'project_id',
'op': 'eq', 'value':
self.auth_headers['X-Project-Id']}
])
),
Alarm(name='name2',
type='threshold',
enabled=True,
alarm_id='b',
description='b',
state='insufficient data',
state_timestamp=None,
timestamp=None,
ok_actions=[],
insufficient_data_actions=[],
alarm_actions=[],
repeat_actions=False,
user_id=self.auth_headers['X-User-Id'],
project_id=self.auth_headers['X-Project-Id'],
rule=dict(comparison_operator='gt',
threshold=4.0,
statistic='avg',
evaluation_periods=60,
period=1,
meter_name='meter.test',
query=[
{'field': 'project_id',
])
),
models.Alarm(name='name2',
type='threshold',
enabled=True,
alarm_id='b',
description='b',
state='insufficient data',
state_timestamp=None,
timestamp=None,
ok_actions=[],
insufficient_data_actions=[],
alarm_actions=[],
repeat_actions=False,
user_id=self.auth_headers['X-User-Id'],
project_id=self.auth_headers['X-Project-Id'],
rule=dict(comparison_operator='gt',
threshold=4.0,
statistic='avg',
evaluation_periods=60,
period=1,
meter_name='meter.test',
query=[{'field': 'project_id',
'op': 'eq', 'value':
self.auth_headers['X-Project-Id']}
])
),
Alarm(name='name3',
type='threshold',
enabled=True,
alarm_id='c',
description='c',
state='insufficient data',
state_timestamp=None,
timestamp=None,
ok_actions=[],
insufficient_data_actions=[],
alarm_actions=[],
repeat_actions=False,
user_id=self.auth_headers['X-User-Id'],
project_id=self.auth_headers['X-Project-Id'],
rule=dict(comparison_operator='gt',
threshold=3.0,
statistic='avg',
evaluation_periods=60,
period=1,
meter_name='meter.mine',
query=[
{'field': 'project_id',
])
),
models.Alarm(name='name3',
type='threshold',
enabled=True,
alarm_id='c',
description='c',
state='insufficient data',
state_timestamp=None,
timestamp=None,
ok_actions=[],
insufficient_data_actions=[],
alarm_actions=[],
repeat_actions=False,
user_id=self.auth_headers['X-User-Id'],
project_id=self.auth_headers['X-Project-Id'],
rule=dict(comparison_operator='gt',
threshold=3.0,
statistic='avg',
evaluation_periods=60,
period=1,
meter_name='meter.mine',
query=[{'field': 'project_id',
'op': 'eq', 'value':
self.auth_headers['X-Project-Id']}
])
),
Alarm(name='name4',
type='combination',
enabled=True,
alarm_id='d',
description='d',
state='insufficient data',
state_timestamp=None,
timestamp=None,
ok_actions=[],
insufficient_data_actions=[],
alarm_actions=[],
repeat_actions=False,
user_id=self.auth_headers['X-User-Id'],
project_id=self.auth_headers['X-Project-Id'],
rule=dict(alarm_ids=[
'a',
'b'],
operator='or')
)
]:
])
),
models.Alarm(name='name4',
type='combination',
enabled=True,
alarm_id='d',
description='d',
state='insufficient data',
state_timestamp=None,
timestamp=None,
ok_actions=[],
insufficient_data_actions=[],
alarm_actions=[],
repeat_actions=False,
user_id=self.auth_headers['X-User-Id'],
project_id=self.auth_headers['X-Project-Id'],
rule=dict(alarm_ids=['a', 'b'],
operator='or')
)]:
self.conn.update_alarm(alarm)
def test_list_alarms(self):
@ -189,21 +184,21 @@ class TestAlarms(FunctionalTest,
self.assertEqual(one['repeat_actions'], alarms[0]['repeat_actions'])
def test_get_alarm_disabled(self):
alarm = Alarm(name='disabled',
type='combination',
enabled=False,
alarm_id='d',
description='d',
state='insufficient data',
state_timestamp=None,
timestamp=None,
ok_actions=[],
insufficient_data_actions=[],
alarm_actions=[],
repeat_actions=False,
user_id=self.auth_headers['X-User-Id'],
project_id=self.auth_headers['X-Project-Id'],
rule=dict(alarm_ids=['a', 'b'], operator='or'))
alarm = models.Alarm(name='disabled',
type='combination',
enabled=False,
alarm_id='d',
description='d',
state='insufficient data',
state_timestamp=None,
timestamp=None,
ok_actions=[],
insufficient_data_actions=[],
alarm_actions=[],
repeat_actions=False,
user_id=self.auth_headers['X-User-Id'],
project_id=self.auth_headers['X-Project-Id'],
rule=dict(alarm_ids=['a', 'b'], operator='or'))
self.conn.update_alarm(alarm)
alarms = self.get_json('/alarms',
@ -222,15 +217,13 @@ class TestAlarms(FunctionalTest,
}])
self.assertEqual(alarms[0]['name'], 'name4')
self.assertEqual(alarms[0]['combination_rule']['alarm_ids'],
['a',
'b'])
['a', 'b'])
self.assertEqual(alarms[0]['combination_rule']['operator'], 'or')
one = self.get_json('/alarms/%s' % alarms[0]['alarm_id'])
self.assertEqual(one['name'], 'name4')
self.assertEqual(alarms[0]['combination_rule']['alarm_ids'],
['a',
'b'])
['a', 'b'])
self.assertEqual(alarms[0]['combination_rule']['operator'], 'or')
self.assertEqual(one['alarm_id'], alarms[0]['alarm_id'])
self.assertEqual(one['repeat_actions'], alarms[0]['repeat_actions'])

View File

@ -21,7 +21,6 @@ import mock
import wsme
from ceilometer.api.controllers import v2 as api
from ceilometer.api.controllers.v2 import Query
from ceilometer.openstack.common.fixture.mockpatch import PatchObject
from ceilometer.openstack.common import test
from ceilometer.openstack.common import timeutils
@ -36,110 +35,110 @@ class TestQuery(test.BaseTestCase):
'pecan.response', mock.MagicMock()))
def test_get_value_as_type_with_integer(self):
query = Query(field='metadata.size',
op='eq',
value='123',
type='integer')
query = api.Query(field='metadata.size',
op='eq',
value='123',
type='integer')
expected = 123
self.assertEqual(query._get_value_as_type(), expected)
def test_get_value_as_type_with_float(self):
query = Query(field='metadata.size',
op='eq',
value='123.456',
type='float')
query = api.Query(field='metadata.size',
op='eq',
value='123.456',
type='float')
expected = 123.456
self.assertEqual(query._get_value_as_type(), expected)
def test_get_value_as_type_with_boolean(self):
query = Query(field='metadata.is_public',
op='eq',
value='True',
type='boolean')
query = api.Query(field='metadata.is_public',
op='eq',
value='True',
type='boolean')
expected = True
self.assertEqual(query._get_value_as_type(), expected)
def test_get_value_as_type_with_string(self):
query = Query(field='metadata.name',
op='eq',
value='linux',
type='string')
query = api.Query(field='metadata.name',
op='eq',
value='linux',
type='string')
expected = 'linux'
self.assertEqual(query._get_value_as_type(), expected)
def test_get_value_as_type_with_integer_without_type(self):
query = Query(field='metadata.size',
op='eq',
value='123')
query = api.Query(field='metadata.size',
op='eq',
value='123')
expected = 123
self.assertEqual(query._get_value_as_type(), expected)
def test_get_value_as_type_with_float_without_type(self):
query = Query(field='metadata.size',
op='eq',
value='123.456')
query = api.Query(field='metadata.size',
op='eq',
value='123.456')
expected = 123.456
self.assertEqual(query._get_value_as_type(), expected)
def test_get_value_as_type_with_boolean_without_type(self):
query = Query(field='metadata.is_public',
op='eq',
value='True')
query = api.Query(field='metadata.is_public',
op='eq',
value='True')
expected = True
self.assertEqual(query._get_value_as_type(), expected)
def test_get_value_as_type_with_string_without_type(self):
query = Query(field='metadata.name',
op='eq',
value='linux')
query = api.Query(field='metadata.name',
op='eq',
value='linux')
expected = 'linux'
self.assertEqual(query._get_value_as_type(), expected)
def test_get_value_as_type_with_bad_type(self):
query = Query(field='metadata.size',
op='eq',
value='123.456',
type='blob')
query = api.Query(field='metadata.size',
op='eq',
value='123.456',
type='blob')
self.assertRaises(wsme.exc.ClientSideError, query._get_value_as_type)
def test_get_value_as_type_with_bad_value(self):
query = Query(field='metadata.size',
op='eq',
value='fake',
type='integer')
query = api.Query(field='metadata.size',
op='eq',
value='fake',
type='integer')
self.assertRaises(wsme.exc.ClientSideError, query._get_value_as_type)
def test_get_value_as_type_integer_expression_without_type(self):
# bug 1221736
query = Query(field='should_be_a_string',
op='eq',
value='123-1')
query = api.Query(field='should_be_a_string',
op='eq',
value='123-1')
expected = '123-1'
self.assertEqual(query._get_value_as_type(), expected)
def test_get_value_as_type_boolean_expression_without_type(self):
# bug 1221736
query = Query(field='should_be_a_string',
op='eq',
value='True or False')
query = api.Query(field='should_be_a_string',
op='eq',
value='True or False')
expected = 'True or False'
self.assertEqual(query._get_value_as_type(), expected)
def test_get_value_as_type_with_syntax_error(self):
# bug 1221736
value = 'WWW-Layer-4a80714f-0232-4580-aa5e-81494d1a4147-uolhh25p5xxm'
query = Query(field='group_id',
op='eq',
value=value)
query = api.Query(field='group_id',
op='eq',
value=value)
expected = value
self.assertEqual(query._get_value_as_type(), expected)
def test_get_value_as_type_with_syntax_error_colons(self):
# bug 1221736
value = 'Ref::StackId'
query = Query(field='field_name',
op='eq',
value=value)
query = api.Query(field='field_name',
op='eq',
value=value)
expected = value
self.assertEqual(query._get_value_as_type(), expected)
@ -182,30 +181,30 @@ class TestQueryToKwArgs(tests_base.BaseTestCase):
side_effect=lambda x, **z: x))
def test_sample_filter_single(self):
q = [Query(field='user_id',
op='eq',
value='uid')]
q = [api.Query(field='user_id',
op='eq',
value='uid')]
kwargs = api._query_to_kwargs(q, storage.SampleFilter.__init__)
self.assertIn('user', kwargs)
self.assertEqual(len(kwargs), 1)
self.assertEqual(kwargs['user'], 'uid')
def test_sample_filter_multi(self):
q = [Query(field='user_id',
op='eq',
value='uid'),
Query(field='project_id',
op='eq',
value='pid'),
Query(field='resource_id',
op='eq',
value='rid'),
Query(field='source',
op='eq',
value='source_name'),
Query(field='meter',
op='eq',
value='meter_name')]
q = [api.Query(field='user_id',
op='eq',
value='uid'),
api.Query(field='project_id',
op='eq',
value='pid'),
api.Query(field='resource_id',
op='eq',
value='rid'),
api.Query(field='source',
op='eq',
value='source_name'),
api.Query(field='meter',
op='eq',
value='meter_name')]
kwargs = api._query_to_kwargs(q, storage.SampleFilter.__init__)
self.assertEqual(len(kwargs), 5)
self.assertEqual(kwargs['user'], 'uid')
@ -217,12 +216,12 @@ class TestQueryToKwArgs(tests_base.BaseTestCase):
def test_sample_filter_timestamp(self):
ts_start = timeutils.utcnow()
ts_end = ts_start + datetime.timedelta(minutes=5)
q = [Query(field='timestamp',
op='lt',
value=str(ts_end)),
Query(field='timestamp',
op='gt',
value=str(ts_start))]
q = [api.Query(field='timestamp',
op='lt',
value=str(ts_end)),
api.Query(field='timestamp',
op='gt',
value=str(ts_start))]
kwargs = api._query_to_kwargs(q, storage.SampleFilter.__init__)
self.assertEqual(len(kwargs), 4)
self.assertTimestampEqual(kwargs['start'], ts_start)
@ -231,12 +230,12 @@ class TestQueryToKwArgs(tests_base.BaseTestCase):
self.assertEqual(kwargs['end_timestamp_op'], 'lt')
def test_sample_filter_meta(self):
q = [Query(field='metadata.size',
op='eq',
value='20'),
Query(field='resource_metadata.id',
op='eq',
value='meta_id')]
q = [api.Query(field='metadata.size',
op='eq',
value='20'),
api.Query(field='resource_metadata.id',
op='eq',
value='meta_id')]
kwargs = api._query_to_kwargs(q, storage.SampleFilter.__init__)
self.assertEqual(len(kwargs), 1)
self.assertEqual(len(kwargs['metaquery']), 2)
@ -244,14 +243,14 @@ class TestQueryToKwArgs(tests_base.BaseTestCase):
self.assertEqual(kwargs['metaquery']['metadata.id'], 'meta_id')
def test_sample_filter_non_equality_on_metadata(self):
queries = [Query(field='resource_metadata.image_id',
op='gt',
value='image',
type='string'),
Query(field='metadata.ramdisk_id',
op='le',
value='ramdisk',
type='string')]
queries = [api.Query(field='resource_metadata.image_id',
op='gt',
value='image',
type='string'),
api.Query(field='metadata.ramdisk_id',
op='le',
value='ramdisk',
type='string')]
with mock.patch('pecan.request') as request:
request.headers.return_value = {'X-ProjectId': 'foobar'}
self.assertRaises(
@ -261,35 +260,36 @@ class TestQueryToKwArgs(tests_base.BaseTestCase):
storage.SampleFilter.__init__)
def test_sample_filter_invalid_field(self):
q = [Query(field='invalid',
op='eq',
value='20')]
q = [api.Query(field='invalid',
op='eq',
value='20')]
self.assertRaises(
wsme.exc.UnknownArgument,
api._query_to_kwargs, q, storage.SampleFilter.__init__)
def test_sample_filter_invalid_op(self):
q = [Query(field='user_id',
op='lt',
value='20')]
q = [api.Query(field='user_id',
op='lt',
value='20')]
self.assertRaises(
wsme.exc.InvalidInput,
api._query_to_kwargs, q, storage.SampleFilter.__init__)
def test_sample_filter_timestamp_invalid_op(self):
ts_start = timeutils.utcnow()
q = [Query(field='timestamp',
op='eq',
value=str(ts_start))]
q = [api.Query(field='timestamp',
op='eq',
value=str(ts_start))]
self.assertRaises(
wsme.exc.InvalidInput,
api._query_to_kwargs, q, storage.SampleFilter.__init__)
def test_sample_filter_exclude_internal(self):
queries = [Query(field=f,
op='eq',
value='fake',
type='string') for f in ['y', 'on_behalf_of', 'x']]
queries = [api.Query(field=f,
op='eq',
value='fake',
type='string')
for f in ['y', 'on_behalf_of', 'x']]
with mock.patch('pecan.request') as request:
request.headers.return_value = {'X-ProjectId': 'foobar'}
self.assertRaises(wsme.exc.ClientSideError,
@ -299,9 +299,9 @@ class TestQueryToKwArgs(tests_base.BaseTestCase):
internal_keys=['on_behalf_of'])
def test_sample_filter_self_always_excluded(self):
queries = [Query(field='user_id',
op='eq',
value='20')]
queries = [api.Query(field='user_id',
op='eq',
value='20')]
with mock.patch('pecan.request') as request:
request.headers.return_value = {'X-ProjectId': 'foobar'}
kwargs = api._query_to_kwargs(queries,
@ -309,12 +309,12 @@ class TestQueryToKwArgs(tests_base.BaseTestCase):
self.assertFalse('self' in kwargs)
def test_sample_filter_translation(self):
queries = [Query(field=f,
op='eq',
value='fake_%s' % f,
type='string') for f in ['user_id',
'project_id',
'resource_id']]
queries = [api.Query(field=f,
op='eq',
value='fake_%s' % f,
type='string') for f in ['user_id',
'project_id',
'resource_id']]
with mock.patch('pecan.request') as request:
request.headers.return_value = {'X-ProjectId': 'foobar'}
kwargs = api._query_to_kwargs(queries,

View File

@ -17,7 +17,7 @@
# under the License.
"""Tests for ceilometer/collector/dispatcher/database.py
"""
from datetime import datetime
import datetime
import mock
@ -84,7 +84,7 @@ class TestDispatcherDB(test.BaseTestCase):
)
expected = msg.copy()
expected['timestamp'] = datetime(2012, 7, 2, 13, 53, 40)
expected['timestamp'] = datetime.datetime(2012, 7, 2, 13, 53, 40)
with mock.patch.object(self.dispatcher.storage_conn,
'record_metering_data') as record_metering_data:
@ -104,7 +104,8 @@ class TestDispatcherDB(test.BaseTestCase):
)
expected = msg.copy()
expected['timestamp'] = datetime(2012, 9, 30, 23, 31, 50, 262000)
expected['timestamp'] = datetime.datetime(2012, 9, 30, 23,
31, 50, 262000)
with mock.patch.object(self.dispatcher.storage_conn,
'record_metering_data') as record_metering_data:

View File

@ -19,7 +19,7 @@
"""Tests for libvirt inspector.
"""
from contextlib import nested
import contextlib
import fixtures
import mock
@ -49,12 +49,12 @@ class TestLibvirtInspection(test.BaseTestCase):
fake_domain = FakeDomain()
connection = self.inspector.connection
with nested(mock.patch.object(connection, 'numOfDomains',
return_value=1),
mock.patch.object(connection, 'listDomainsID',
return_value=[42]),
mock.patch.object(connection, 'lookupByID',
return_value=fake_domain)):
with contextlib.nested(mock.patch.object(connection, 'numOfDomains',
return_value=1),
mock.patch.object(connection, 'listDomainsID',
return_value=[42]),
mock.patch.object(connection, 'lookupByID',
return_value=fake_domain)):
inspected_instances = list(self.inspector.inspect_instances())
self.assertEqual(len(inspected_instances), 1)
inspected_instance = inspected_instances[0]
@ -62,11 +62,12 @@ class TestLibvirtInspection(test.BaseTestCase):
self.assertEqual(inspected_instance.UUID, 'uuid')
def test_inspect_cpus(self):
with nested(mock.patch.object(self.inspector.connection,
'lookupByName',
return_value=self.domain),
mock.patch.object(self.domain, 'info',
return_value=(0L, 0L, 0L, 2L, 999999L))):
with contextlib.nested(mock.patch.object(self.inspector.connection,
'lookupByName',
return_value=self.domain),
mock.patch.object(self.domain, 'info',
return_value=(0L, 0L, 0L,
2L, 999999L))):
cpu_info = self.inspector.inspect_cpus(self.instance_name)
self.assertEqual(cpu_info.number, 2L)
self.assertEqual(cpu_info.time, 999999L)
@ -137,12 +138,13 @@ class TestLibvirtInspection(test.BaseTestCase):
interfaceStats = interface_stats.__getitem__
connection = self.inspector.connection
with nested(mock.patch.object(connection, 'lookupByName',
return_value=self.domain),
mock.patch.object(self.domain, 'XMLDesc',
return_value=dom_xml),
mock.patch.object(self.domain, 'interfaceStats',
side_effect=interfaceStats)):
with contextlib.nested(mock.patch.object(connection, 'lookupByName',
return_value=self.domain),
mock.patch.object(self.domain, 'XMLDesc',
return_value=dom_xml),
mock.patch.object(self.domain,
'interfaceStats',
side_effect=interfaceStats)):
interfaces = list(self.inspector.inspect_vnics(self.instance_name))
self.assertEqual(len(interfaces), 3)
@ -200,13 +202,14 @@ class TestLibvirtInspection(test.BaseTestCase):
</domain>
"""
with nested(mock.patch.object(self.inspector.connection,
'lookupByName',
return_value=self.domain),
mock.patch.object(self.domain, 'XMLDesc',
return_value=dom_xml),
mock.patch.object(self.domain, 'blockStats',
return_value=(1L, 2L, 3L, 4L, -1))):
with contextlib.nested(mock.patch.object(self.inspector.connection,
'lookupByName',
return_value=self.domain),
mock.patch.object(self.domain, 'XMLDesc',
return_value=dom_xml),
mock.patch.object(self.domain, 'blockStats',
return_value=(1L, 2L, 3L,
4L, -1))):
disks = list(self.inspector.inspect_disks(self.instance_name))
self.assertEqual(len(disks), 1)

View File

@ -17,7 +17,7 @@
# License for the specific language governing permissions and limitations
# under the License.
from datetime import datetime
import datetime
from ceilometer.image import notifications
from ceilometer.openstack.common import test
@ -28,7 +28,7 @@ def fake_uuid(x):
return '%s-%s-%s-%s' % (x * 8, x * 4, x * 4, x * 12)
NOW = datetime.isoformat(datetime.utcnow())
NOW = datetime.datetime.isoformat(datetime.datetime.utcnow())
NOTIFICATION_SEND = {
u'event_type': u'image.send',

View File

@ -20,7 +20,7 @@
import cStringIO as StringIO
import mock
from webob import Request
import webob
from ceilometer.objectstore import swift_middleware
from ceilometer.openstack.common.fixture import config
@ -80,8 +80,8 @@ class TestSwiftMiddleware(test.BaseTestCase):
def test_get(self):
app = swift_middleware.CeilometerMiddleware(FakeApp(), {})
req = Request.blank('/1.0/account/container/obj',
environ={'REQUEST_METHOD': 'GET'})
req = webob.Request.blank('/1.0/account/container/obj',
environ={'REQUEST_METHOD': 'GET'})
resp = app(req.environ, self.start_response)
self.assertEqual(list(resp), ["This string is 28 bytes long"])
samples = self.pipeline_manager.pipelines[0].samples
@ -100,10 +100,10 @@ class TestSwiftMiddleware(test.BaseTestCase):
def test_put(self):
app = swift_middleware.CeilometerMiddleware(FakeApp(body=['']), {})
req = Request.blank('/1.0/account/container/obj',
environ={'REQUEST_METHOD': 'PUT',
'wsgi.input':
StringIO.StringIO('some stuff')})
req = webob.Request.blank('/1.0/account/container/obj',
environ={'REQUEST_METHOD': 'PUT',
'wsgi.input':
StringIO.StringIO('some stuff')})
list(app(req.environ, self.start_response))
samples = self.pipeline_manager.pipelines[0].samples
self.assertEqual(len(samples), 2)
@ -121,10 +121,10 @@ class TestSwiftMiddleware(test.BaseTestCase):
def test_post(self):
app = swift_middleware.CeilometerMiddleware(FakeApp(body=['']), {})
req = Request.blank('/1.0/account/container/obj',
environ={'REQUEST_METHOD': 'POST',
'wsgi.input':
StringIO.StringIO('some other stuff')})
req = webob.Request.blank(
'/1.0/account/container/obj',
environ={'REQUEST_METHOD': 'POST',
'wsgi.input': StringIO.StringIO('some other stuff')})
list(app(req.environ, self.start_response))
samples = self.pipeline_manager.pipelines[0].samples
self.assertEqual(len(samples), 2)
@ -142,8 +142,8 @@ class TestSwiftMiddleware(test.BaseTestCase):
def test_head(self):
app = swift_middleware.CeilometerMiddleware(FakeApp(body=['']), {})
req = Request.blank('/1.0/account/container/obj',
environ={'REQUEST_METHOD': 'HEAD'})
req = webob.Request.blank('/1.0/account/container/obj',
environ={'REQUEST_METHOD': 'HEAD'})
list(app(req.environ, self.start_response))
samples = self.pipeline_manager.pipelines[0].samples
self.assertEqual(len(samples), 1)
@ -159,8 +159,8 @@ class TestSwiftMiddleware(test.BaseTestCase):
def test_bogus_request(self):
"""Test even for arbitrary request method, this will still work."""
app = swift_middleware.CeilometerMiddleware(FakeApp(body=['']), {})
req = Request.blank('/1.0/account/container/obj',
environ={'REQUEST_METHOD': 'BOGUS'})
req = webob.Request.blank('/1.0/account/container/obj',
environ={'REQUEST_METHOD': 'BOGUS'})
list(app(req.environ, self.start_response))
samples = self.pipeline_manager.pipelines[0].samples
@ -176,8 +176,8 @@ class TestSwiftMiddleware(test.BaseTestCase):
def test_get_container(self):
app = swift_middleware.CeilometerMiddleware(FakeApp(), {})
req = Request.blank('/1.0/account/container',
environ={'REQUEST_METHOD': 'GET'})
req = webob.Request.blank('/1.0/account/container',
environ={'REQUEST_METHOD': 'GET'})
list(app(req.environ, self.start_response))
samples = self.pipeline_manager.pipelines[0].samples
self.assertEqual(len(samples), 2)
@ -189,8 +189,8 @@ class TestSwiftMiddleware(test.BaseTestCase):
def test_no_metadata_headers(self):
app = swift_middleware.CeilometerMiddleware(FakeApp(), {})
req = Request.blank('/1.0/account/container',
environ={'REQUEST_METHOD': 'GET'})
req = webob.Request.blank('/1.0/account/container',
environ={'REQUEST_METHOD': 'GET'})
list(app(req.environ, self.start_response))
samples = self.pipeline_manager.pipelines[0].samples
self.assertEqual(len(samples), 2)
@ -206,12 +206,10 @@ class TestSwiftMiddleware(test.BaseTestCase):
app = swift_middleware.CeilometerMiddleware(FakeApp(), {
'metadata_headers': 'X_VAR1, x-var2, x-var3'
})
req = Request.blank('/1.0/account/container',
environ={'REQUEST_METHOD': 'GET'},
headers={
'X_VAR1': 'value1',
'X_VAR2': 'value2'
})
req = webob.Request.blank('/1.0/account/container',
environ={'REQUEST_METHOD': 'GET'},
headers={'X_VAR1': 'value1',
'X_VAR2': 'value2'})
list(app(req.environ, self.start_response))
samples = self.pipeline_manager.pipelines[0].samples
self.assertEqual(len(samples), 2)
@ -232,8 +230,8 @@ class TestSwiftMiddleware(test.BaseTestCase):
app = swift_middleware.CeilometerMiddleware(FakeApp(), {
'metadata_headers': 'x-var3'
})
req = Request.blank('/1.0/account/container',
environ={'REQUEST_METHOD': 'GET'})
req = webob.Request.blank('/1.0/account/container',
environ={'REQUEST_METHOD': 'GET'})
list(app(req.environ, self.start_response))
samples = self.pipeline_manager.pipelines[0].samples
self.assertEqual(len(samples), 2)
@ -247,16 +245,15 @@ class TestSwiftMiddleware(test.BaseTestCase):
def test_bogus_path(self):
app = swift_middleware.CeilometerMiddleware(FakeApp(), {})
req = Request.blank('//v1/account/container',
environ={'REQUEST_METHOD': 'GET'})
req = webob.Request.blank('//v1/account/container',
environ={'REQUEST_METHOD': 'GET'})
list(app(req.environ, self.start_response))
samples = self.pipeline_manager.pipelines[0].samples
self.assertEqual(len(samples), 0)
def test_missing_resource_id(self):
app = swift_middleware.CeilometerMiddleware(FakeApp(), {})
req = Request.blank('/5.0/',
environ={'REQUEST_METHOD': 'GET'})
req = webob.Request.blank('/5.0/', environ={'REQUEST_METHOD': 'GET'})
list(app(req.environ, self.start_response))
samples = self.pipeline_manager.pipelines[0].samples
self.assertEqual(len(samples), 0)
@ -266,8 +263,8 @@ class TestSwiftMiddleware(test.BaseTestCase):
def test_publish_sample_fail(self, mocked_publish_sample):
mocked_publish_sample.side_effect = Exception("a exception")
app = swift_middleware.CeilometerMiddleware(FakeApp(body=["test"]), {})
req = Request.blank('/1.0/account/container',
environ={'REQUEST_METHOD': 'GET'})
req = webob.Request.blank('/1.0/account/container',
environ={'REQUEST_METHOD': 'GET'})
resp = list(app(req.environ, self.start_response))
samples = self.pipeline_manager.pipelines[0].samples
self.assertEqual(len(samples), 0)

View File

@ -24,7 +24,7 @@ import logging.handlers
import os
import tempfile
from ceilometer.openstack.common.network_utils import urlsplit
from ceilometer.openstack.common import network_utils as utils
from ceilometer.openstack.common import test
from ceilometer.publisher import file
from ceilometer import sample
@ -72,8 +72,8 @@ class TestFilePublisher(test.BaseTestCase):
# Test valid configurations
tempdir = tempfile.mkdtemp()
name = '%s/log_file' % tempdir
parsed_url = urlsplit('file://%s?max_bytes=50&backup_count=3'
% name)
parsed_url = utils.urlsplit('file://%s?max_bytes=50&backup_count=3'
% name)
publisher = file.FilePublisher(parsed_url)
publisher.publish_samples(None,
self.test_data)
@ -91,7 +91,7 @@ class TestFilePublisher(test.BaseTestCase):
# Test missing max bytes, backup count configurations
tempdir = tempfile.mkdtemp()
name = '%s/log_file_plain' % tempdir
parsed_url = urlsplit('file://%s' % name)
parsed_url = utils.urlsplit('file://%s' % name)
publisher = file.FilePublisher(parsed_url)
publisher.publish_samples(None,
self.test_data)
@ -113,7 +113,7 @@ class TestFilePublisher(test.BaseTestCase):
def test_file_publisher_invalid(self):
# Test invalid max bytes, backup count configurations
tempdir = tempfile.mkdtemp()
parsed_url = urlsplit(
parsed_url = utils.urlsplit(
'file://%s/log_file_bad'
'?max_bytes=yus&backup_count=5y' % tempdir)
publisher = file.FilePublisher(parsed_url)

View File

@ -16,7 +16,7 @@
# License for the specific language governing permissions and limitations
# under the License.
from datetime import datetime
import datetime
import mock
from sqlalchemy.dialects.mysql import DECIMAL
@ -46,7 +46,7 @@ class PreciseTimestampTest(test.BaseTestCase):
self._mysql_dialect = self.fake_dialect('mysql')
self._postgres_dialect = self.fake_dialect('postgres')
self._type = models.PreciseTimestamp()
self._date = datetime(2012, 7, 2, 10, 44)
self._date = datetime.datetime(2012, 7, 2, 10, 44)
def test_load_dialect_impl_mysql(self):
result = self._type.load_dialect_impl(self._mysql_dialect)

View File

@ -26,8 +26,7 @@
"""
from mock import patch
from ceilometer.storage.impl_hbase import Connection
from ceilometer.storage.impl_hbase import MConnection
from ceilometer.storage import impl_hbase as hbase
from ceilometer.tests import db as tests_db
@ -39,8 +38,8 @@ class ConnectionTest(HBaseEngineTestBase):
def test_hbase_connection(self):
self.CONF.database.connection = self.database_connection
conn = Connection(self.CONF)
self.assertIsInstance(conn.conn, MConnection)
conn = hbase.Connection(self.CONF)
self.assertIsInstance(conn.conn, hbase.MConnection)
class TestConn(object):
def __init__(self, host, port):
@ -53,7 +52,7 @@ class ConnectionTest(HBaseEngineTestBase):
return TestConn(conf['host'], conf['port'])
self.CONF.database.connection = 'hbase://test_hbase:9090'
with patch.object(Connection, '_get_connection',
with patch.object(hbase.Connection, '_get_connection',
side_effect=get_connection):
conn = Connection(self.CONF)
conn = hbase.Connection(self.CONF)
self.assertIsInstance(conn.conn, TestConn)

View File

@ -30,8 +30,7 @@ from mock import patch
from ceilometer.publisher import rpc
from ceilometer import sample
from ceilometer.storage.base import MultipleResultsFound
from ceilometer.storage.base import NoResultFound
from ceilometer.storage import base
from ceilometer.storage import impl_mongodb
from ceilometer.tests import db as tests_db
from ceilometer.tests.storage import test_storage_scenarios
@ -81,7 +80,7 @@ class MongoDBTestMarkerBase(test_storage_scenarios.DBTestBase,
ret = impl_mongodb.Connection._get_marker(self.conn.db.resource,
marker_pairs)
self.assertEqual(ret['project_id'], 'project-id-foo')
except NoResultFound:
except base.NoResultFound:
self.assertTrue(True)
def test_get_marker_multiple(self):
@ -90,7 +89,7 @@ class MongoDBTestMarkerBase(test_storage_scenarios.DBTestBase,
ret = impl_mongodb.Connection._get_marker(self.conn.db.resource,
marker_pairs)
self.assertEqual(ret['project_id'], 'project-id-foo')
except MultipleResultsFound:
except base.MultipleResultsFound:
self.assertTrue(True)
@ -279,7 +278,7 @@ class AlarmTestPagination(test_storage_scenarios.AlarmTestBase,
marker_pairs)
self.assertEqual(ret['rule']['meter_name'],
'meter_name-foo')
except NoResultFound:
except base.NoResultFound:
self.assertTrue(True)
def test_alarm_get_marker_multiple(self):
@ -290,5 +289,5 @@ class AlarmTestPagination(test_storage_scenarios.AlarmTestBase,
marker_pairs)
self.assertEqual(ret['rule']['meter_name'],
'counter-name-foo')
except MultipleResultsFound:
except base.MultipleResultsFound:
self.assertTrue(True)

View File

@ -30,7 +30,6 @@ from mock import patch
from ceilometer.storage import models
from ceilometer.storage.sqlalchemy import models as sql_models
from ceilometer.storage.sqlalchemy.models import table_args
from ceilometer.tests import db as tests_db
from ceilometer import utils
@ -142,4 +141,4 @@ class ModelTest(tests_db.TestBase):
database_connection = 'mysql://localhost'
def test_model_table_args(self):
self.assertIsNotNone(table_args())
self.assertIsNotNone(sql_models.table_args())

View File

@ -27,7 +27,7 @@ from ceilometer.openstack.common import timeutils
from ceilometer.publisher import rpc
from ceilometer import sample
from ceilometer import storage
from ceilometer.storage.base import Pagination
from ceilometer.storage import base
from ceilometer.storage import models
from ceilometer.tests import db as tests_db
@ -305,39 +305,40 @@ class ResourceTestPagination(DBTestBase,
tests_db.MixinTestsWithBackendScenarios):
def test_get_resource_all_limit(self):
pagination = Pagination(limit=8)
pagination = base.Pagination(limit=8)
results = list(self.conn.get_resources(pagination=pagination))
self.assertEqual(len(results), 8)
pagination = Pagination(limit=5)
pagination = base.Pagination(limit=5)
results = list(self.conn.get_resources(pagination=pagination))
self.assertEqual(len(results), 5)
def test_get_resources_all_marker(self):
pagination = Pagination(primary_sort_dir='asc', sort_keys=['user_id'],
sort_dirs=['asc'],
marker_value='resource-id-4')
pagination = base.Pagination(primary_sort_dir='asc',
sort_keys=['user_id'],
sort_dirs=['asc'],
marker_value='resource-id-4')
results = list(self.conn.get_resources(pagination=pagination))
self.assertEqual(len(results), 5)
def test_get_resources_paginate(self):
pagination = Pagination(limit=3, primary_sort_dir='asc',
sort_keys=['user_id'], sort_dirs=['asc'],
marker_value='resource-id-4')
pagination = base.Pagination(limit=3, primary_sort_dir='asc',
sort_keys=['user_id'], sort_dirs=['asc'],
marker_value='resource-id-4')
results = self.conn.get_resources(pagination=pagination)
self.assertEqual(['user-id-5', 'user-id-6', 'user-id-7'],
[i.user_id for i in results])
pagination = Pagination(limit=2, primary_sort_dir='desc',
sort_keys=['user_id'], sort_dirs=['asc'],
marker_value='resource-id-4')
pagination = base.Pagination(limit=2, primary_sort_dir='desc',
sort_keys=['user_id'], sort_dirs=['asc'],
marker_value='resource-id-4')
results = list(self.conn.get_resources(pagination=pagination))
self.assertEqual(['user-id-3', 'user-id-2'],
[i.user_id for i in results])
pagination = Pagination(limit=3, primary_sort_dir='asc',
sort_keys=['user_id'], sort_dirs=['asc'],
marker_value='resource-id-5')
pagination = base.Pagination(limit=3, primary_sort_dir='asc',
sort_keys=['user_id'], sort_dirs=['asc'],
marker_value='resource-id-5')
results = list(self.conn.get_resources(pagination=pagination))
self.assertEqual(['resource-id-6', 'resource-id-7', 'resource-id-8'],
[i.resource_id for i in results])
@ -427,47 +428,48 @@ class MeterTestPagination(DBTestBase,
tests_db.MixinTestsWithBackendScenarios):
def tet_get_meters_all_limit(self):
pagination = Pagination(limit=8)
pagination = base.Pagination(limit=8)
results = list(self.conn.get_meters(pagination=pagination))
self.assertEqual(len(results), 8)
pagination = Pagination(limit=5)
pagination = base.Pagination(limit=5)
results = list(self.conn.get_meters(pagination=pagination))
self.assertEqual(len(results), 5)
def test_get_meters_all_marker(self):
pagination = Pagination(limit=3, primary_sort_dir='desc',
sort_keys=['user_id'], sort_dirs=['desc'],
marker_value='resource-id-5')
pagination = base.Pagination(limit=3, primary_sort_dir='desc',
sort_keys=['user_id'],
sort_dirs=['desc'],
marker_value='resource-id-5')
results = list(self.conn.get_meters(pagination=pagination))
self.assertEqual(len(results), 8)
def test_get_meters_paginate(self):
pagination = Pagination(limit=3, primary_sort_dir='desc',
sort_keys=['user_id'], sort_dirs=['desc'],
marker_value='resource-id-5')
pagination = base.Pagination(limit=3, primary_sort_dir='desc',
sort_keys=['user_id'], sort_dirs=['desc'],
marker_value='resource-id-5')
results = self.conn.get_meters(pagination=pagination)
self.assertEqual(['user-id-8', 'user-id-7', 'user-id-6'],
[i.user_id for i in results])
pagination = Pagination(limit=3, primary_sort_dir='asc',
sort_keys=['user_id'], sort_dirs=['desc'],
marker_value='resource-id-5')
pagination = base.Pagination(limit=3, primary_sort_dir='asc',
sort_keys=['user_id'], sort_dirs=['desc'],
marker_value='resource-id-5')
results = self.conn.get_meters(pagination=pagination)
self.assertEqual(['user-id-5', 'user-id-6', 'user-id-7'],
[i.user_id for i in results])
pagination = Pagination(limit=2, primary_sort_dir='desc',
sort_keys=['user_id'], sort_dirs=['desc'],
marker_value='resource-id-5')
pagination = base.Pagination(limit=2, primary_sort_dir='desc',
sort_keys=['user_id'], sort_dirs=['desc'],
marker_value='resource-id-5')
results = list(self.conn.get_meters(pagination=pagination))
self.assertEqual(['user-id-3', 'user-id-2'],
[i.user_id for i in results])
pagination = Pagination(limit=3, primary_sort_dir='desc',
sort_keys=['user_id'], sort_dirs=['desc'],
marker_value='resource-id-5')
pagination = base.Pagination(limit=3, primary_sort_dir='desc',
sort_keys=['user_id'], sort_dirs=['desc'],
marker_value='resource-id-5')
results = self.conn.get_meters(pagination=pagination)
self.assertEqual([], [i.user_id for i in results])
@ -2029,26 +2031,26 @@ class AlarmTestPagination(AlarmTestBase,
def test_get_alarm_all_limit(self):
self.add_some_alarms()
pagination = Pagination(limit=2)
pagination = base.Pagination(limit=2)
alarms = list(self.conn.get_alarms(pagination=pagination))
self.assertEqual(len(alarms), 2)
pagination = Pagination(limit=1)
pagination = base.Pagination(limit=1)
alarms = list(self.conn.get_alarms(pagination=pagination))
self.assertEqual(len(alarms), 1)
def test_get_alarm_all_marker(self):
self.add_some_alarms()
pagination = Pagination(marker_value='orange-alert')
pagination = base.Pagination(marker_value='orange-alert')
alarms = list(self.conn.get_alarms(pagination=pagination))
self.assertEqual(len(alarms), 0)
pagination = Pagination(marker_value='red-alert')
pagination = base.Pagination(marker_value='red-alert')
alarms = list(self.conn.get_alarms(pagination=pagination))
self.assertEqual(len(alarms), 1)
pagination = Pagination(marker_value='yellow-alert')
pagination = base.Pagination(marker_value='yellow-alert')
alarms = list(self.conn.get_alarms(pagination=pagination))
self.assertEqual(len(alarms), 2)
@ -2056,12 +2058,12 @@ class AlarmTestPagination(AlarmTestBase,
self.add_some_alarms()
pagination = Pagination(limit=4, marker_value='yellow-alert')
pagination = base.Pagination(limit=4, marker_value='yellow-alert')
page = list(self.conn.get_alarms(pagination=pagination))
self.assertEqual(['red-alert', 'orange-alert'], [i.name for i in page])
pagination = Pagination(limit=2, marker_value='orange-alert',
primary_sort_dir='asc')
pagination = base.Pagination(limit=2, marker_value='orange-alert',
primary_sort_dir='asc')
page1 = list(self.conn.get_alarms(pagination=pagination))
self.assertEqual(['red-alert', 'yellow-alert'],
[i.name for i in page1])

View File

@ -16,9 +16,9 @@
# License for the specific language governing permissions and limitations
# under the License.
from collections import defaultdict
import collections
from ceilometer.openstack.common.gettextutils import _
from ceilometer.openstack.common.gettextutils import _ # noqa
from ceilometer.openstack.common import log
from ceilometer.openstack.common import timeutils
from ceilometer import sample
@ -34,7 +34,7 @@ class Namespace(object):
to yield false when used in a boolean expression.
"""
def __init__(self, seed):
self.__dict__ = defaultdict(lambda: Namespace({}))
self.__dict__ = collections.defaultdict(lambda: Namespace({}))
self.__dict__.update(seed)
for k, v in self.__dict__.iteritems():
if isinstance(v, dict):

View File

@ -45,7 +45,6 @@ deps = -r{toxinidir}/requirements.txt
commands = {posargs}
[flake8]
ignore = H302
builtins = _
exclude=.venv,.git,.tox,dist,doc,./ceilometer/openstack/common,*lib/python*,*egg,tools,nova_tests,build
show-source = True