Merge "Use None instead of mutables in method params defaults"

This commit is contained in:
Jenkins 2014-05-23 14:03:23 +00:00 committed by Gerrit Code Review
commit 4bfa2f4a7d
24 changed files with 121 additions and 65 deletions

View File

@@ -99,8 +99,9 @@ class PollingTask(object):
 class AgentManager(os_service.Service):
 
-    def __init__(self, namespace, default_discovery=[]):
+    def __init__(self, namespace, default_discovery=None):
         super(AgentManager, self).__init__()
+        default_discovery = default_discovery or []
         self.default_discovery = default_discovery
         self.pollster_manager = self._extensions('poll', namespace)
         self.discovery_manager = self._extensions('discover')
@@ -156,7 +157,7 @@ class AgentManager(os_service.Service):
                 return d.obj
         return None
 
-    def discover(self, discovery=[]):
+    def discover(self, discovery=None):
        resources = []
        for url in (discovery or self.default_discovery):
            name, param = self._parse_discoverer(url)

View File

@@ -332,7 +332,7 @@ def _verify_query_segregation(query, auth_project=None):
             raise ProjectNotAuthorized(q.value)
 
-def _validate_query(query, db_func, internal_keys=[],
+def _validate_query(query, db_func, internal_keys=None,
                     allow_timestamps=True):
     """Validates the syntax of the query and verifies that the query
     request is authorized for the included project.
@@ -356,6 +357,7 @@ def _validate_query(query, db_func, internal_keys=[],
     """
+    internal_keys = internal_keys or []
     _verify_query_segregation(query)
 
     valid_keys = inspect.getargspec(db_func)[0]
@@ -446,8 +447,9 @@ def _validate_timestamp_fields(query, field_name, operator_list,
     return False
 
-def _query_to_kwargs(query, db_func, internal_keys=[],
+def _query_to_kwargs(query, db_func, internal_keys=None,
                      allow_timestamps=True):
+    internal_keys = internal_keys or []
     _validate_query(query, db_func, internal_keys=internal_keys,
                     allow_timestamps=allow_timestamps)
     query = _sanitize_query(query, db_func)
@ -519,7 +521,7 @@ def _validate_groupby_fields(groupby_fields):
return list(set(groupby_fields)) return list(set(groupby_fields))
def _get_query_timestamps(args={}): def _get_query_timestamps(args=None):
"""Return any optional timestamp information in the request. """Return any optional timestamp information in the request.
Determine the desired range, if any, from the GET arguments. Set Determine the desired range, if any, from the GET arguments. Set
@ -536,6 +538,12 @@ def _get_query_timestamps(args={}):
search_offset: search_offset parameter from request search_offset: search_offset parameter from request
""" """
if args is None:
return {'query_start': None,
'query_end': None,
'start_timestamp': None,
'end_timestamp': None,
'search_offset': 0}
search_offset = int(args.get('search_offset', 0)) search_offset = int(args.get('search_offset', 0))
start_timestamp = args.get('start_timestamp') start_timestamp = args.get('start_timestamp')
@ -645,8 +653,9 @@ class OldSample(_Base):
message_id = wtypes.text message_id = wtypes.text
"A unique identifier for the sample" "A unique identifier for the sample"
def __init__(self, counter_volume=None, resource_metadata={}, def __init__(self, counter_volume=None, resource_metadata=None,
timestamp=None, **kwds): timestamp=None, **kwds):
resource_metadata = resource_metadata or {}
if counter_volume is not None: if counter_volume is not None:
counter_volume = float(counter_volume) counter_volume = float(counter_volume)
resource_metadata = _flatten_metadata(resource_metadata) resource_metadata = _flatten_metadata(resource_metadata)
@ -810,12 +819,13 @@ class MeterController(rest.RestController):
self.meter_name = meter_name self.meter_name = meter_name
@wsme_pecan.wsexpose([OldSample], [Query], int) @wsme_pecan.wsexpose([OldSample], [Query], int)
def get_all(self, q=[], limit=None): def get_all(self, q=None, limit=None):
"""Return samples for the meter. """Return samples for the meter.
:param q: Filter rules for the data to be returned. :param q: Filter rules for the data to be returned.
:param limit: Maximum number of samples to return. :param limit: Maximum number of samples to return.
""" """
q = q or []
if limit and limit < 0: if limit and limit < 0:
raise ClientSideError(_("Limit must be positive")) raise ClientSideError(_("Limit must be positive"))
kwargs = _query_to_kwargs(q, storage.SampleFilter.__init__) kwargs = _query_to_kwargs(q, storage.SampleFilter.__init__)
@ -886,7 +896,7 @@ class MeterController(rest.RestController):
return samples return samples
@wsme_pecan.wsexpose([Statistics], [Query], [unicode], int, [Aggregate]) @wsme_pecan.wsexpose([Statistics], [Query], [unicode], int, [Aggregate])
def statistics(self, q=[], groupby=[], period=None, aggregate=[]): def statistics(self, q=None, groupby=None, period=None, aggregate=None):
"""Computes the statistics of the samples in the time range given. """Computes the statistics of the samples in the time range given.
:param q: Filter rules for the data to be returned. :param q: Filter rules for the data to be returned.
@ -895,6 +905,10 @@ class MeterController(rest.RestController):
period long of that number of seconds. period long of that number of seconds.
:param aggregate: The selectable aggregation functions to be applied. :param aggregate: The selectable aggregation functions to be applied.
""" """
q = q or []
groupby = groupby or []
aggregate = aggregate or []
if period and period < 0: if period and period < 0:
raise ClientSideError(_("Period must be positive.")) raise ClientSideError(_("Period must be positive."))
@ -979,11 +993,13 @@ class MetersController(rest.RestController):
return MeterController(meter_name), remainder return MeterController(meter_name), remainder
@wsme_pecan.wsexpose([Meter], [Query]) @wsme_pecan.wsexpose([Meter], [Query])
def get_all(self, q=[]): def get_all(self, q=None):
"""Return all known meters, based on the data recorded so far. """Return all known meters, based on the data recorded so far.
:param q: Filter rules for the meters to be returned. :param q: Filter rules for the meters to be returned.
""" """
q = q or []
#Timestamp field is not supported for Meter queries #Timestamp field is not supported for Meter queries
kwargs = _query_to_kwargs(q, pecan.request.storage_conn.get_meters, kwargs = _query_to_kwargs(q, pecan.request.storage_conn.get_meters,
allow_timestamps=False) allow_timestamps=False)
@ -1067,12 +1083,14 @@ class SamplesController(rest.RestController):
"""Controller managing the samples.""" """Controller managing the samples."""
@wsme_pecan.wsexpose([Sample], [Query], int) @wsme_pecan.wsexpose([Sample], [Query], int)
def get_all(self, q=[], limit=None): def get_all(self, q=None, limit=None):
"""Return all known samples, based on the data recorded so far. """Return all known samples, based on the data recorded so far.
:param q: Filter rules for the samples to be returned. :param q: Filter rules for the samples to be returned.
:param limit: Maximum number of samples to be returned. :param limit: Maximum number of samples to be returned.
""" """
q = q or []
if limit and limit < 0: if limit and limit < 0:
raise ClientSideError(_("Limit must be positive")) raise ClientSideError(_("Limit must be positive"))
kwargs = _query_to_kwargs(q, storage.SampleFilter.__init__) kwargs = _query_to_kwargs(q, storage.SampleFilter.__init__)
@ -1145,8 +1163,9 @@ class ValidatedComplexQuery(object):
timestamp_fields = ["timestamp", "state_timestamp"] timestamp_fields = ["timestamp", "state_timestamp"]
def __init__(self, query, db_model, additional_name_mapping={}, def __init__(self, query, db_model, additional_name_mapping=None,
metadata_allowed=False): metadata_allowed=False):
additional_name_mapping = additional_name_mapping or {}
self.name_mapping = {"user": "user_id", self.name_mapping = {"user": "user_id",
"project": "project_id"} "project": "project_id"}
self.name_mapping.update(additional_name_mapping) self.name_mapping.update(additional_name_mapping)
@ -1404,7 +1423,8 @@ class Resource(_Base):
source = wtypes.text source = wtypes.text
"The source where the resource come from" "The source where the resource come from"
def __init__(self, metadata={}, **kwds): def __init__(self, metadata=None, **kwds):
metadata = metadata or {}
metadata = _flatten_metadata(metadata) metadata = _flatten_metadata(metadata)
super(Resource, self).__init__(metadata=metadata, **kwds) super(Resource, self).__init__(metadata=metadata, **kwds)
@ -1457,12 +1477,13 @@ class ResourcesController(rest.RestController):
self._resource_links(resource_id)) self._resource_links(resource_id))
@wsme_pecan.wsexpose([Resource], [Query], int) @wsme_pecan.wsexpose([Resource], [Query], int)
def get_all(self, q=[], meter_links=1): def get_all(self, q=None, meter_links=1):
"""Retrieve definitions of all of the resources. """Retrieve definitions of all of the resources.
:param q: Filter rules for the resources to be returned. :param q: Filter rules for the resources to be returned.
:param meter_links: option to include related meter links :param meter_links: option to include related meter links
""" """
q = q or []
kwargs = _query_to_kwargs(q, pecan.request.storage_conn.get_resources) kwargs = _query_to_kwargs(q, pecan.request.storage_conn.get_resources)
resources = [ resources = [
Resource.from_db_and_links(r, Resource.from_db_and_links(r,
@ -1974,11 +1995,12 @@ class AlarmController(rest.RestController):
# TODO(eglynn): add pagination marker to signature once overall # TODO(eglynn): add pagination marker to signature once overall
# API support for pagination is finalized # API support for pagination is finalized
@wsme_pecan.wsexpose([AlarmChange], [Query]) @wsme_pecan.wsexpose([AlarmChange], [Query])
def history(self, q=[]): def history(self, q=None):
"""Assembles the alarm history requested. """Assembles the alarm history requested.
:param q: Filter rules for the changes to be described. :param q: Filter rules for the changes to be described.
""" """
q = q or []
# allow history to be returned for deleted alarms, but scope changes # allow history to be returned for deleted alarms, but scope changes
# returned to those carried out on behalf of the auth'd tenant, to # returned to those carried out on behalf of the auth'd tenant, to
# avoid inappropriate cross-tenant visibility of alarm history # avoid inappropriate cross-tenant visibility of alarm history
@ -2105,11 +2127,12 @@ class AlarmsController(rest.RestController):
return Alarm.from_db_model(alarm) return Alarm.from_db_model(alarm)
@wsme_pecan.wsexpose([Alarm], [Query]) @wsme_pecan.wsexpose([Alarm], [Query])
def get_all(self, q=[]): def get_all(self, q=None):
"""Return all alarms, based on the query provided. """Return all alarms, based on the query provided.
:param q: Filter rules for the alarms to be returned. :param q: Filter rules for the alarms to be returned.
""" """
q = q or []
#Timestamp is not supported field for Simple Alarm queries #Timestamp is not supported field for Simple Alarm queries
kwargs = _query_to_kwargs(q, kwargs = _query_to_kwargs(q,
pecan.request.storage_conn.get_alarms, pecan.request.storage_conn.get_alarms,
@ -2310,11 +2333,12 @@ class EventsController(rest.RestController):
@requires_admin @requires_admin
@wsme_pecan.wsexpose([Event], [EventQuery]) @wsme_pecan.wsexpose([Event], [EventQuery])
def get_all(self, q=[]): def get_all(self, q=None):
"""Return all events matching the query filters. """Return all events matching the query filters.
:param q: Filter arguments for which Events to return :param q: Filter arguments for which Events to return
""" """
q = q or []
event_filter = _event_query_to_event_filter(q) event_filter = _event_query_to_event_filter(q)
return [Event(message_id=event.message_id, return [Event(message_id=event.message_id,
event_type=event.event_type, event_type=event.event_type,

View File

@ -77,7 +77,8 @@ def _get_metadata_from_object(instance):
def make_sample_from_instance(instance, name, type, unit, volume, def make_sample_from_instance(instance, name, type, unit, volume,
additional_metadata={}): additional_metadata=None):
additional_metadata = additional_metadata or {}
resource_metadata = _get_metadata_from_object(instance) resource_metadata = _get_metadata_from_object(instance)
resource_metadata.update(additional_metadata) resource_metadata.update(additional_metadata)
return sample.Sample( return sample.Sample(

View File

@ -82,7 +82,7 @@ class _Base(plugin.CentralPollster):
class EnergyPollster(_Base): class EnergyPollster(_Base):
"""Measures energy consumption.""" """Measures energy consumption."""
def get_samples(self, manager, cache, resources=[]): def get_samples(self, manager, cache, resources=None):
"""Returns all samples.""" """Returns all samples."""
for probe in self._iter_probes(manager.keystone, cache): for probe in self._iter_probes(manager.keystone, cache):
yield sample.Sample( yield sample.Sample(
@ -102,7 +102,7 @@ class EnergyPollster(_Base):
class PowerPollster(_Base): class PowerPollster(_Base):
"""Measures power consumption.""" """Measures power consumption."""
def get_samples(self, manager, cache, resources=[]): def get_samples(self, manager, cache, resources=None):
"""Returns all samples.""" """Returns all samples."""
for probe in self._iter_probes(manager.keystone, cache): for probe in self._iter_probes(manager.keystone, cache):
yield sample.Sample( yield sample.Sample(

View File

@ -44,13 +44,14 @@ class HardwarePollster(plugin.CentralPollster):
super(HardwarePollster, self).__init__() super(HardwarePollster, self).__init__()
self.inspectors = {} self.inspectors = {}
def get_samples(self, manager, cache, resources=[]): def get_samples(self, manager, cache, resources=None):
"""Return an iterable of Sample instances from polling the resources. """Return an iterable of Sample instances from polling the resources.
:param manager: The service manager invoking the plugin :param manager: The service manager invoking the plugin
:param cache: A dictionary for passing data between plugins :param cache: A dictionary for passing data between plugins
:param resources: end point to poll data from :param resources: end point to poll data from
""" """
resources = resources or []
h_cache = cache.setdefault(self.CACHE_KEY, {}) h_cache = cache.setdefault(self.CACHE_KEY, {})
sample_iters = [] sample_iters = []
for res in resources: for res in resources:

View File

@ -104,7 +104,7 @@ class _Base(plugin.PollsterBase):
class ImagePollster(_Base): class ImagePollster(_Base):
def get_samples(self, manager, cache, resources=[]): def get_samples(self, manager, cache, resources=None):
for image in self._iter_images(manager.keystone, cache): for image in self._iter_images(manager.keystone, cache):
yield sample.Sample( yield sample.Sample(
name='image', name='image',
@ -121,7 +121,7 @@ class ImagePollster(_Base):
class ImageSizePollster(_Base): class ImageSizePollster(_Base):
def get_samples(self, manager, cache, resources=[]): def get_samples(self, manager, cache, resources=None):
for image in self._iter_images(manager.keystone, cache): for image in self._iter_images(manager.keystone, cache):
yield sample.Sample( yield sample.Sample(
name='image.size', name='image.size',

View File

@ -40,7 +40,7 @@ class FloatingIPPollster(plugin.CentralPollster):
cache['floating_ips'] = list(self._get_floating_ips()) cache['floating_ips'] = list(self._get_floating_ips())
return iter(cache['floating_ips']) return iter(cache['floating_ips'])
def get_samples(self, manager, cache, resources=[]): def get_samples(self, manager, cache, resources=None):
for ip in self._iter_floating_ips(cache): for ip in self._iter_floating_ips(cache):
self.LOG.info(_("FLOATING IP USAGE: %s") % ip.ip) self.LOG.info(_("FLOATING IP USAGE: %s") % ip.ip)
# FIXME (flwang) Now Nova API /os-floating-ips can't provide those # FIXME (flwang) Now Nova API /os-floating-ips can't provide those

View File

@ -63,7 +63,8 @@ class _Base(plugin.CentralPollster):
scheme).driver() scheme).driver()
return _Base.drivers[scheme] return _Base.drivers[scheme]
def get_samples(self, manager, cache, resources=[]): def get_samples(self, manager, cache, resources=None):
resources = resources or []
for resource in resources: for resource in resources:
parse_url, params = self._parse_my_resource(resource) parse_url, params = self._parse_my_resource(resource)
ext = self.get_driver(parse_url.scheme) ext = self.get_driver(parse_url.scheme)

View File

@ -88,7 +88,7 @@ class ObjectsPollster(_Base):
"""Iterate over all accounts, using keystone. """Iterate over all accounts, using keystone.
""" """
def get_samples(self, manager, cache, resources=[]): def get_samples(self, manager, cache, resources=None):
for tenant, account in self._iter_accounts(manager.keystone, cache): for tenant, account in self._iter_accounts(manager.keystone, cache):
yield sample.Sample( yield sample.Sample(
name='storage.objects', name='storage.objects',
@ -107,7 +107,7 @@ class ObjectsSizePollster(_Base):
"""Iterate over all accounts, using keystone. """Iterate over all accounts, using keystone.
""" """
def get_samples(self, manager, cache, resources=[]): def get_samples(self, manager, cache, resources=None):
for tenant, account in self._iter_accounts(manager.keystone, cache): for tenant, account in self._iter_accounts(manager.keystone, cache):
yield sample.Sample( yield sample.Sample(
name='storage.objects.size', name='storage.objects.size',
@ -126,7 +126,7 @@ class ObjectsContainersPollster(_Base):
"""Iterate over all accounts, using keystone. """Iterate over all accounts, using keystone.
""" """
def get_samples(self, manager, cache, resources=[]): def get_samples(self, manager, cache, resources=None):
for tenant, account in self._iter_accounts(manager.keystone, cache): for tenant, account in self._iter_accounts(manager.keystone, cache):
yield sample.Sample( yield sample.Sample(
name='storage.objects.containers', name='storage.objects.containers',
@ -147,7 +147,7 @@ class ContainersObjectsPollster(_Base):
METHOD = 'get' METHOD = 'get'
def get_samples(self, manager, cache, resources=[]): def get_samples(self, manager, cache, resources=None):
for project, account in self._iter_accounts(manager.keystone, cache): for project, account in self._iter_accounts(manager.keystone, cache):
containers_info = account[1] containers_info = account[1]
for container in containers_info: for container in containers_info:
@ -170,7 +170,7 @@ class ContainersSizePollster(_Base):
METHOD = 'get' METHOD = 'get'
def get_samples(self, manager, cache, resources=[]): def get_samples(self, manager, cache, resources=None):
for project, account in self._iter_accounts(manager.keystone, cache): for project, account in self._iter_accounts(manager.keystone, cache):
containers_info = account[1] containers_info = account[1]
for container in containers_info: for container in containers_info:

View File

@ -55,7 +55,8 @@ class PipelineException(Exception):
class PublishContext(object): class PublishContext(object):
def __init__(self, context, pipelines=[]): def __init__(self, context, pipelines=None):
pipelines = pipelines or []
self.pipelines = set(pipelines) self.pipelines = set(pipelines)
self.context = context self.context = context

View File

@ -130,7 +130,7 @@ class PollsterBase(PluginBase):
"""Base class for plugins that support the polling API.""" """Base class for plugins that support the polling API."""
@abc.abstractmethod @abc.abstractmethod
def get_samples(self, manager, cache, resources=[]): def get_samples(self, manager, cache, resources=None):
"""Return a sequence of Counter instances from polling the resources. """Return a sequence of Counter instances from polling the resources.
:param manager: The service manager class invoking the plugin. :param manager: The service manager class invoking the plugin.

View File

@ -104,7 +104,7 @@ class SampleFilter(object):
end=None, end_timestamp_op=None, end=None, end_timestamp_op=None,
resource=None, meter=None, resource=None, meter=None,
source=None, message_id=None, source=None, message_id=None,
metaquery={}): metaquery=None):
self.user = user self.user = user
self.project = project self.project = project
self.start = utils.sanitize_timestamp(start) self.start = utils.sanitize_timestamp(start)
@ -114,7 +114,7 @@ class SampleFilter(object):
self.resource = resource self.resource = resource
self.meter = meter self.meter = meter
self.source = source self.source = source
self.metaquery = metaquery self.metaquery = metaquery or {}
self.message_id = message_id self.message_id = message_id
@ -137,12 +137,12 @@ class EventFilter(object):
""" """
def __init__(self, start_time=None, end_time=None, event_type=None, def __init__(self, start_time=None, end_time=None, event_type=None,
message_id=None, traits_filter=[]): message_id=None, traits_filter=None):
self.start_time = utils.sanitize_timestamp(start_time) self.start_time = utils.sanitize_timestamp(start_time)
self.end_time = utils.sanitize_timestamp(end_time) self.end_time = utils.sanitize_timestamp(end_time)
self.message_id = message_id self.message_id = message_id
self.event_type = event_type self.event_type = event_type
self.traits_filter = traits_filter self.traits_filter = traits_filter or []
def __repr__(self): def __repr__(self):
return ("<EventFilter(start_time: %s," return ("<EventFilter(start_time: %s,"

View File

@ -83,8 +83,8 @@ class NoResultFound(Exception):
class Pagination(object): class Pagination(object):
"""Class for pagination query.""" """Class for pagination query."""
def __init__(self, limit=None, primary_sort_dir='desc', sort_keys=[], def __init__(self, limit=None, primary_sort_dir='desc', sort_keys=None,
sort_dirs=[], marker_value=None): sort_dirs=None, marker_value=None):
"""This puts all parameters used for paginate query together. """This puts all parameters used for paginate query together.
:param limit: Maximum number of items to return; :param limit: Maximum number of items to return;
@ -99,8 +99,8 @@ class Pagination(object):
self.limit = limit self.limit = limit
self.primary_sort_dir = primary_sort_dir self.primary_sort_dir = primary_sort_dir
self.marker_value = marker_value self.marker_value = marker_value
self.sort_keys = sort_keys self.sort_keys = sort_keys or []
self.sort_dirs = sort_dirs self.sort_dirs = sort_dirs or []
class Connection(object): class Connection(object):
@ -177,7 +177,7 @@ class Connection(object):
def get_resources(user=None, project=None, source=None, def get_resources(user=None, project=None, source=None,
start_timestamp=None, start_timestamp_op=None, start_timestamp=None, start_timestamp_op=None,
end_timestamp=None, end_timestamp_op=None, end_timestamp=None, end_timestamp_op=None,
metaquery={}, resource=None, pagination=None): metaquery=None, resource=None, pagination=None):
"""Return an iterable of models.Resource instances containing """Return an iterable of models.Resource instances containing
resource information. resource information.
@ -196,7 +196,7 @@ class Connection(object):
@staticmethod @staticmethod
def get_meters(user=None, project=None, resource=None, source=None, def get_meters(user=None, project=None, resource=None, source=None,
metaquery={}, pagination=None): metaquery=None, pagination=None):
"""Return an iterable of model.Meter instances containing meter """Return an iterable of model.Meter instances containing meter
information. information.

View File

@ -127,7 +127,7 @@ class Connection(pymongo_base.Connection):
self.upgrade() self.upgrade()
@classmethod @classmethod
def _build_sort_instructions(cls, sort_keys=[], sort_dir='desc'): def _build_sort_instructions(cls, sort_keys=None, sort_dir='desc'):
"""Returns a sort_instruction. """Returns a sort_instruction.
Sort instructions are used in the query to determine what attributes Sort instructions are used in the query to determine what attributes
@ -137,6 +137,7 @@ class Connection(pymongo_base.Connection):
:param sort_dir: direction in which results be sorted (asc, desc). :param sort_dir: direction in which results be sorted (asc, desc).
:return: sort parameters :return: sort parameters
""" """
sort_keys = sort_keys or []
sort_instructions = [] sort_instructions = []
_sort_dir = cls.SORT_OPERATION_MAP.get( _sort_dir = cls.SORT_OPERATION_MAP.get(
sort_dir, cls.SORT_OPERATION_MAP['desc']) sort_dir, cls.SORT_OPERATION_MAP['desc'])
@ -235,7 +236,7 @@ class Connection(pymongo_base.Connection):
def get_resources(self, user=None, project=None, source=None, def get_resources(self, user=None, project=None, source=None,
start_timestamp=None, start_timestamp_op=None, start_timestamp=None, start_timestamp_op=None,
end_timestamp=None, end_timestamp_op=None, end_timestamp=None, end_timestamp_op=None,
metaquery={}, resource=None, pagination=None): metaquery=None, resource=None, pagination=None):
"""Return an iterable of models.Resource instances """Return an iterable of models.Resource instances
:param user: Optional ID for user that owns the resource. :param user: Optional ID for user that owns the resource.
@ -252,6 +253,8 @@ class Connection(pymongo_base.Connection):
if pagination: if pagination:
raise NotImplementedError('Pagination not implemented') raise NotImplementedError('Pagination not implemented')
metaquery = metaquery or {}
q = {} q = {}
if user is not None: if user is not None:
q['user_id'] = user q['user_id'] = user

View File

@ -310,7 +310,7 @@ class Connection(base.Connection):
def get_resources(self, user=None, project=None, source=None, def get_resources(self, user=None, project=None, source=None,
start_timestamp=None, start_timestamp_op=None, start_timestamp=None, start_timestamp_op=None,
end_timestamp=None, end_timestamp_op=None, end_timestamp=None, end_timestamp_op=None,
metaquery={}, resource=None, pagination=None): metaquery=None, resource=None, pagination=None):
"""Return an iterable of models.Resource instances """Return an iterable of models.Resource instances
:param user: Optional ID for user that owns the resource. :param user: Optional ID for user that owns the resource.
@ -327,6 +327,8 @@ class Connection(base.Connection):
if pagination: if pagination:
raise NotImplementedError('Pagination not implemented') raise NotImplementedError('Pagination not implemented')
metaquery = metaquery or {}
sample_filter = storage.SampleFilter( sample_filter = storage.SampleFilter(
user=user, project=project, user=user, project=project,
start=start_timestamp, start_timestamp_op=start_timestamp_op, start=start_timestamp, start_timestamp_op=start_timestamp_op,
@ -368,7 +370,7 @@ class Connection(base.Connection):
) )
def get_meters(self, user=None, project=None, resource=None, source=None, def get_meters(self, user=None, project=None, resource=None, source=None,
metaquery={}, pagination=None): metaquery=None, pagination=None):
"""Return an iterable of models.Meter instances """Return an iterable of models.Meter instances
:param user: Optional ID for user that owns the resource. :param user: Optional ID for user that owns the resource.
@ -379,6 +381,8 @@ class Connection(base.Connection):
:param pagination: Optional pagination query. :param pagination: Optional pagination query.
""" """
metaquery = metaquery or {}
if pagination: if pagination:
raise NotImplementedError(_('Pagination not implemented')) raise NotImplementedError(_('Pagination not implemented'))
with self.conn_pool.connection() as conn: with self.conn_pool.connection() as conn:
@ -568,7 +572,8 @@ class MTable(object):
def delete(self, key): def delete(self, key):
del self._rows[key] del self._rows[key]
def scan(self, filter=None, columns=[], row_start=None, row_stop=None): def scan(self, filter=None, columns=None, row_start=None, row_stop=None):
columns = columns or []
sorted_keys = sorted(self._rows) sorted_keys = sorted(self._rows)
# copy data between row_start and row_stop into a dict # copy data between row_start and row_stop into a dict
rows = {} rows = {}
@ -661,7 +666,8 @@ class MConnection(object):
def open(self): def open(self):
LOG.debug(_("Opening in-memory HBase connection")) LOG.debug(_("Opening in-memory HBase connection"))
def create_table(self, n, families={}): def create_table(self, n, families=None):
families = families or {}
if n in self.tables: if n in self.tables:
return self.tables[n] return self.tables[n]
t = MTable(n, families) t = MTable(n, families)
@ -879,12 +885,13 @@ def deserialize_entry(entry, get_raw_meta=True):
return flatten_result, sources, meters, metadata return flatten_result, sources, meters, metadata
def serialize_entry(data={}, **kwargs): def serialize_entry(data=None, **kwargs):
"""Return a dict that is ready to be stored to HBase """Return a dict that is ready to be stored to HBase
:param data: dict to be serialized :param data: dict to be serialized
:param kwargs: additional args :param kwargs: additional args
""" """
data = data or {}
entry_dict = copy.copy(data) entry_dict = copy.copy(data)
entry_dict.update(**kwargs) entry_dict.update(**kwargs)

View File

@ -59,7 +59,7 @@ class Connection(base.Connection):
def get_resources(self, user=None, project=None, source=None, def get_resources(self, user=None, project=None, source=None,
start_timestamp=None, start_timestamp_op=None, start_timestamp=None, start_timestamp_op=None,
end_timestamp=None, end_timestamp_op=None, end_timestamp=None, end_timestamp_op=None,
metaquery={}, resource=None, pagination=None): metaquery=None, resource=None, pagination=None):
"""Return an iterable of dictionaries containing resource information. """Return an iterable of dictionaries containing resource information.
{ 'resource_id': UUID of the resource, { 'resource_id': UUID of the resource,
@ -84,7 +84,7 @@ class Connection(base.Connection):
return [] return []
def get_meters(self, user=None, project=None, resource=None, source=None, def get_meters(self, user=None, project=None, resource=None, source=None,
limit=None, metaquery={}, pagination=None): limit=None, metaquery=None, pagination=None):
"""Return an iterable of dictionaries containing meter information. """Return an iterable of dictionaries containing meter information.
{ 'name': name of the meter, { 'name': name of the meter,

View File

@ -586,7 +586,7 @@ class Connection(pymongo_base.Connection):
return dict(criteria_equ, ** criteria_cmp) return dict(criteria_equ, ** criteria_cmp)
@classmethod @classmethod
def _build_paginate_query(cls, marker, sort_keys=[], sort_dir='desc'): def _build_paginate_query(cls, marker, sort_keys=None, sort_dir='desc'):
"""Returns a query with sorting / pagination. """Returns a query with sorting / pagination.
Pagination works by requiring sort_key and sort_dir. Pagination works by requiring sort_key and sort_dir.
@ -600,6 +600,7 @@ class Connection(pymongo_base.Connection):
:return: sort parameters, query to use :return: sort parameters, query to use
""" """
all_sort = [] all_sort = []
sort_keys = sort_keys or []
all_sort, _op = cls._build_sort_instructions(sort_keys, sort_dir) all_sort, _op = cls._build_sort_instructions(sort_keys, sort_dir)
if marker is not None: if marker is not None:
@ -625,7 +626,7 @@ class Connection(pymongo_base.Connection):
return all_sort, metaquery return all_sort, metaquery
@classmethod @classmethod
def _build_sort_instructions(cls, sort_keys=[], sort_dir='desc'): def _build_sort_instructions(cls, sort_keys=None, sort_dir='desc'):
"""Returns a sort_instruction and paging operator. """Returns a sort_instruction and paging operator.
Sort instructions are used in the query to determine what attributes Sort instructions are used in the query to determine what attributes
@ -635,6 +636,7 @@ class Connection(pymongo_base.Connection):
:param sort_dir: direction in which results be sorted (asc, desc). :param sort_dir: direction in which results be sorted (asc, desc).
:return: sort instructions and paging operator :return: sort instructions and paging operator
""" """
sort_keys = sort_keys or []
sort_instructions = [] sort_instructions = []
_sort_dir, operation = cls.SORT_OPERATION_MAPPING.get( _sort_dir, operation = cls.SORT_OPERATION_MAPPING.get(
sort_dir, cls.SORT_OPERATION_MAPPING['desc']) sort_dir, cls.SORT_OPERATION_MAPPING['desc'])
@ -647,7 +649,7 @@ class Connection(pymongo_base.Connection):
@classmethod @classmethod
def paginate_query(cls, q, db_collection, limit=None, marker=None, def paginate_query(cls, q, db_collection, limit=None, marker=None,
sort_keys=[], sort_dir='desc'): sort_keys=None, sort_dir='desc'):
"""Returns a query result with sorting / pagination. """Returns a query result with sorting / pagination.
Pagination works by requiring sort_key and sort_dir. Pagination works by requiring sort_key and sort_dir.
@ -663,6 +665,7 @@ class Connection(pymongo_base.Connection):
return: The query with sorting/pagination added. return: The query with sorting/pagination added.
""" """
sort_keys = sort_keys or []
all_sort, query = cls._build_paginate_query(marker, all_sort, query = cls._build_paginate_query(marker,
sort_keys, sort_keys,
sort_dir) sort_dir)
@ -772,7 +775,7 @@ class Connection(pymongo_base.Connection):
def get_resources(self, user=None, project=None, source=None, def get_resources(self, user=None, project=None, source=None,
start_timestamp=None, start_timestamp_op=None, start_timestamp=None, start_timestamp_op=None,
end_timestamp=None, end_timestamp_op=None, end_timestamp=None, end_timestamp_op=None,
metaquery={}, resource=None, pagination=None): metaquery=None, resource=None, pagination=None):
"""Return an iterable of models.Resource instances """Return an iterable of models.Resource instances
:param user: Optional ID for user that owns the resource. :param user: Optional ID for user that owns the resource.
@ -789,6 +792,8 @@ class Connection(pymongo_base.Connection):
if pagination: if pagination:
raise NotImplementedError('Pagination not implemented') raise NotImplementedError('Pagination not implemented')
metaquery = metaquery or {}
query = {} query = {}
if user is not None: if user is not None:
query['user_id'] = user query['user_id'] = user

View File

@ -365,7 +365,7 @@ class Connection(base.Connection):
def get_resources(self, user=None, project=None, source=None, def get_resources(self, user=None, project=None, source=None,
start_timestamp=None, start_timestamp_op=None, start_timestamp=None, start_timestamp_op=None,
end_timestamp=None, end_timestamp_op=None, end_timestamp=None, end_timestamp_op=None,
metaquery={}, resource=None, pagination=None): metaquery=None, resource=None, pagination=None):
"""Return an iterable of api_models.Resource instances """Return an iterable of api_models.Resource instances
:param user: Optional ID for user that owns the resource. :param user: Optional ID for user that owns the resource.
@ -382,6 +382,8 @@ class Connection(base.Connection):
if pagination: if pagination:
raise NotImplementedError('Pagination not implemented') raise NotImplementedError('Pagination not implemented')
metaquery = metaquery or {}
def _apply_filters(query): def _apply_filters(query):
# TODO(gordc) this should be merged with make_query_from_filter # TODO(gordc) this should be merged with make_query_from_filter
for column, value in [(models.Sample.resource_id, resource), for column, value in [(models.Sample.resource_id, resource),
@ -442,7 +444,7 @@ class Connection(base.Connection):
) )
def get_meters(self, user=None, project=None, resource=None, source=None, def get_meters(self, user=None, project=None, resource=None, source=None,
metaquery={}, pagination=None): metaquery=None, pagination=None):
"""Return an iterable of api_models.Meter instances """Return an iterable of api_models.Meter instances
:param user: Optional ID for user that owns the resource. :param user: Optional ID for user that owns the resource.
@ -456,6 +458,8 @@ class Connection(base.Connection):
if pagination: if pagination:
raise NotImplementedError('Pagination not implemented') raise NotImplementedError('Pagination not implemented')
metaquery = metaquery or {}
def _apply_filters(query): def _apply_filters(query):
# TODO(gordc) this should be merged with make_query_from_filter # TODO(gordc) this should be merged with make_query_from_filter
for column, value in [(models.Sample.resource_id, resource), for column, value in [(models.Sample.resource_id, resource),

View File

@ -147,7 +147,7 @@ class Connection(base.Connection):
COMMON_AVAILABLE_CAPABILITIES) COMMON_AVAILABLE_CAPABILITIES)
def get_meters(self, user=None, project=None, resource=None, source=None, def get_meters(self, user=None, project=None, resource=None, source=None,
metaquery={}, pagination=None): metaquery=None, pagination=None):
"""Return an iterable of models.Meter instances """Return an iterable of models.Meter instances
:param user: Optional ID for user that owns the resource. :param user: Optional ID for user that owns the resource.
@ -161,6 +161,8 @@ class Connection(base.Connection):
if pagination: if pagination:
raise NotImplementedError('Pagination not implemented') raise NotImplementedError('Pagination not implemented')
metaquery = metaquery or {}
q = {} q = {}
if user is not None: if user is not None:
q['user_id'] = user q['user_id'] = user

View File

@ -72,7 +72,8 @@ default_test_data = TestSample(
class TestPollster(plugin.PollsterBase): class TestPollster(plugin.PollsterBase):
test_data = default_test_data test_data = default_test_data
def get_samples(self, manager, cache, resources=[]): def get_samples(self, manager, cache, resources=None):
resources = resources or []
self.samples.append((manager, resources)) self.samples.append((manager, resources))
self.resources.extend(resources) self.resources.extend(resources)
c = copy.copy(self.test_data) c = copy.copy(self.test_data)
@ -81,7 +82,8 @@ class TestPollster(plugin.PollsterBase):
class TestPollsterException(TestPollster): class TestPollsterException(TestPollster):
def get_samples(self, manager, cache, resources=[]): def get_samples(self, manager, cache, resources=None):
resources = resources or []
self.samples.append((manager, resources)) self.samples.append((manager, resources))
self.resources.extend(resources) self.resources.extend(resources)
raise Exception() raise Exception()

View File

@ -138,7 +138,7 @@ class FunctionalTest(db_test_base.TestBase):
return response return response
def get_json(self, path, expect_errors=False, headers=None, def get_json(self, path, expect_errors=False, headers=None,
extra_environ=None, q=[], groupby=[], status=None, extra_environ=None, q=None, groupby=None, status=None,
override_params=None, **params): override_params=None, **params):
"""Sends simulated HTTP GET request to Pecan test app. """Sends simulated HTTP GET request to Pecan test app.
@ -155,6 +155,8 @@ class FunctionalTest(db_test_base.TestBase):
:param override_params: literally encoded query param string :param override_params: literally encoded query param string
:param params: content for wsgi.input of request :param params: content for wsgi.input of request
""" """
q = q or []
groupby = groupby or []
full_path = self.PATH_PREFIX + path full_path = self.PATH_PREFIX + path
if override_params: if override_params:
all_params = override_params all_params = override_params

View File

@ -108,11 +108,11 @@ class TestAPIACL(FunctionalTest,
self.conn.record_metering_data(msg) self.conn.record_metering_data(msg)
def get_json(self, path, expect_errors=False, headers=None, def get_json(self, path, expect_errors=False, headers=None,
q=[], **params): q=None, **params):
return super(TestAPIACL, self).get_json(path, return super(TestAPIACL, self).get_json(path,
expect_errors=expect_errors, expect_errors=expect_errors,
headers=headers, headers=headers,
q=q, q=q or [],
extra_environ=self.environ, extra_environ=self.environ,
**params) **params)

View File

@ -29,11 +29,11 @@ from ceilometer import storage as storage
class FakeComplexQuery(api.ValidatedComplexQuery): class FakeComplexQuery(api.ValidatedComplexQuery):
def __init__(self, db_model, additional_name_mapping={}, metadata=False): def __init__(self, db_model, additional_name_mapping=None, metadata=False):
super(FakeComplexQuery, self).__init__(query=None, super(FakeComplexQuery, self).__init__(query=None,
db_model=db_model, db_model=db_model,
additional_name_mapping= additional_name_mapping=
additional_name_mapping, additional_name_mapping or {},
metadata_allowed=metadata) metadata_allowed=metadata)

View File

@ -55,7 +55,7 @@ class ScalingTransformer(transformer.TransformerBase):
"""Transformer to apply a scaling conversion. """Transformer to apply a scaling conversion.
""" """
def __init__(self, source={}, target={}, **kwargs): def __init__(self, source=None, target=None, **kwargs):
"""Initialize transformer with configured parameters. """Initialize transformer with configured parameters.
:param source: dict containing source sample unit :param source: dict containing source sample unit
@ -63,6 +63,8 @@ class ScalingTransformer(transformer.TransformerBase):
unit and scaling factor (a missing value unit and scaling factor (a missing value
connotes no change) connotes no change)
""" """
source = source or {}
target = target or {}
self.source = source self.source = source
self.target = target self.target = target
self.scale = target.get('scale') self.scale = target.get('scale')