Fix list of modules not included in auto-gen docs

The variable for excluding modules should be a sequence of strings.
Turn the current value from a string to a tuple so documentation
for our source is generated.

The errors and warnings were fixed in the docstrings in the source code
otherwise the doc generation would fail.

The following files were excluded from the doc build because of
non-existing imports:

 'ceilometer.compute.nova_notifier'
    http://bit.ly/remove-nova-notifier-bp
 'ceilometer.openstack.common.db.sqlalchemy.session'
    https://review.openstack.org/#/c/97850/
 'ceilometer.openstack.common.middleware.audit'
 'ceilometer.openstack.common.middleware.notifier'
    https://bugs.launchpad.net/ceilometer/+bug/1327084
 'ceilometer.openstack.common.log_handler'
    https://bugs.launchpad.net/ceilometer/+bug/1327076

These failed imports are registered in one blueprint and two bug reports.

Change-Id: If0bc1c8fc96ba513bbeb90d5257e40b7621a8473
This commit is contained in:
Ildiko Vancsa 2014-06-09 18:13:04 +02:00 committed by Ildiko
parent 3efe03b60a
commit 702d99937e
11 changed files with 269 additions and 216 deletions

View File

@ -240,17 +240,16 @@ class AlarmNotifierService(os_service.Service):
def notify_alarm(self, context, data):
"""Notify that alarm has been triggered.
data should be a dict with the following keys:
- actions, the URL of the action to run;
this is a mapped to extensions automatically
- alarm_id, the ID of the alarm that has been triggered
- previous, the previous state of the alarm
- current, the new state the alarm has transitioned to
- reason, the reason the alarm changed its state
- reason_data, a dict representation of the reason
:param context: Request context.
:param data: (dict):
:param context: Request context.
:param data: A dict as described above.
- actions, the URL of the action to run; this is mapped to
extensions automatically
- alarm_id, the ID of the alarm that has been triggered
- previous, the previous state of the alarm
- current, the new state the alarm has transitioned to
- reason, the reason the alarm changed its state
- reason_data, a dict representation of the reason
"""
actions = data.get('actions')
if not actions:

View File

@ -251,71 +251,74 @@ class NotificationEventsConverter(object):
notification will be processed according to the LAST definition that
matches its event_type. (We use the last matching definition because that
allows you to use YAML merge syntax in the definitions file.)
Each definition is a dictionary with the following keys (all are required):
event_type: this is a list of notification event_types this definition
will handle. These can be wildcarded with unix shell glob
(not regex!) wildcards.
An exclusion listing (starting with a '!') will exclude any
types listed from matching. If ONLY exclusions are listed,
the definition will match anything not matching the
exclusions.
This item can also be a string, which will be taken as
equivalent to 1 item list.
Each definition is a dictionary with the following keys (all are
required):
Examples:
* ['compute.instance.exists'] will only match
compute.instance.exists notifications
* "compute.instance.exists" Same as above.
* ["image.create", "image.delete"] will match
image.create and image.delete, but not anything else.
* 'compute.instance.*" will match
compute.instance.create.start but not image.upload
* ['*.start','*.end', '!scheduler.*'] will match
compute.instance.create.start, and image.delete.end,
but NOT compute.instance.exists or
scheduler.run_instance.start
* '!image.*' matches any notification except image
notifications.
* ['*', '!image.*'] same as above.
traits: dictionary, The keys are trait names, the values are the trait
definitions
Each trait definition is a dictionary with the following keys:
type (optional): The data type for this trait. (as a string)
Valid options are: 'text', 'int', 'float' and 'datetime'
defaults to 'text' if not specified.
fields: a path specification for the field(s) in the
notification you wish to extract. The paths can be
specified with a dot syntax (e.g. 'payload.host').
dictionary syntax (e.g. 'payload[host]') is also supported.
in either case, if the key for the field you are looking
for contains special characters, like '.', it will need to
be quoted (with double or single quotes) like so:
- event_type: this is a list of notification event_types this definition
will handle. These can be wildcarded with unix shell glob (not regex!)
wildcards.
An exclusion listing (starting with a '!') will exclude any types listed
from matching. If ONLY exclusions are listed, the definition will match
anything not matching the exclusions.
This item can also be a string, which will be taken as equivalent to 1
item list.
"payload.image_meta.'org.openstack__1__architecture'"
Examples:
The syntax used for the field specification is a variant
of JSONPath, and is fairly flexible.
(see: https://github.com/kennknowles/python-jsonpath-rw
for more info) Specifications can be written to match
multiple possible fields, the value for the trait will
be derived from the matching fields that exist and have
a non-null (i.e. is not None) values in the notification.
By default the value will be the first such field.
(plugins can alter that, if they wish)
* ['compute.instance.exists'] will only match
compute.instance.exists notifications
* "compute.instance.exists" Same as above.
* ["image.create", "image.delete"] will match
image.create and image.delete, but not anything else.
* "compute.instance.*" will match
compute.instance.create.start but not image.upload
* ['*.start','*.end', '!scheduler.*'] will match
compute.instance.create.start, and image.delete.end,
but NOT compute.instance.exists or
scheduler.run_instance.start
* '!image.*' matches any notification except image
notifications.
* ['*', '!image.*'] same as above.
This configuration value is normally a string, for
convenience, it can be specified as a list of
specifications, which will be OR'ed together (a union
query in jsonpath terms)
plugin (optional): (dictionary) with the following keys:
name: (string) name of a plugin to load
parameters: (optional) Dictionary of keyword args to pass
to the plugin on initialization.
See documentation on each plugin to see what
arguments it accepts.
For convenience, this value can also be specified as a
string, which is interpreted as a plugin name, which will
be loaded with no parameters.
- traits: (dict) The keys are trait names, the values are the trait
definitions. Each trait definition is a dictionary with the following
keys:
- type (optional): The data type for this trait. (as a string)
Valid options are: 'text', 'int', 'float' and 'datetime', defaults to
'text' if not specified.
- fields: a path specification for the field(s) in the notification you
wish to extract. The paths can be specified with a dot syntax
(e.g. 'payload.host') or dictionary syntax (e.g. 'payload[host]') is
also supported.
In either case, if the key for the field you are looking for contains
special characters, like '.', it will need to be quoted (with double
or single quotes) like so::
"payload.image_meta.'org.openstack__1__architecture'"
The syntax used for the field specification is a variant of JSONPath,
and is fairly flexible.
(see: https://github.com/kennknowles/python-jsonpath-rw for more info)
Specifications can be written to match multiple possible fields, the
value for the trait will be derived from the matching fields that
exist and have a non-null (i.e. is not None) values in the
notification.
By default the value will be the first such field. (plugins can alter
that, if they wish)
This configuration value is normally a string, for convenience, it can
be specified as a list of specifications, which will be OR'ed together
(a union query in jsonpath terms)
- plugin (optional): (dictionary) with the following keys:
- name: (string) name of a plugin to load
- parameters: (optional) Dictionary of keyword args to pass
to the plugin on initialization. See documentation on each plugin to
see what arguments it accepts.
For convenience, this value can also be specified as a string, which is
interpreted as a plugin name, which will be loaded with no parameters.
"""

View File

@ -51,24 +51,25 @@ class TraitPluginBase(object):
appropriate type for the trait.
:param match_list: A list (may be empty if no matches) of *tuples*.
Each tuple is (field_path, value) where field_path
is the jsonpath for that specific field,
Each tuple is (field_path, value) where field_path is the jsonpath
for that specific field.
Example::
Example:
trait's fields definition: ['payload.foobar',
'payload.baz',
'payload.thing.*']
notification body:
{
'message_id': '12345',
'publisher': 'someservice.host',
'payload': {
'foobar': 'test',
'thing': {
'bar': 12,
'boing': 13,
}
}
'message_id': '12345',
'publisher': 'someservice.host',
'payload': {
'foobar': 'test',
'thing': {
'bar': 12,
'boing': 13,
}
}
}
match_list will be: [('payload.foobar','test'),
('payload.thing.bar',12),
@ -76,16 +77,19 @@ class TraitPluginBase(object):
Here is a plugin that emulates the default (no plugin) behavior:
class DefaultPlugin(TraitPluginBase):
"Plugin that returns the first field value"
.. code-block:: python
def __init__(self, **kw):
super(DefaultPlugin, self).__init__()
class DefaultPlugin(TraitPluginBase):
"Plugin that returns the first field value."
def __init__(self, **kw):
super(DefaultPlugin, self).__init__()
def trait_value(self, match_list):
if not match_list:
return None
return match_list[0][1]
def trait_value(self, match_list):
if not match_list:
return None
return match_list[0][1]
"""

View File

@ -27,27 +27,30 @@ class OpencontrailDriver(driver.Driver):
This driver uses resources in "pipeline.yaml".
Resource requires below conditions:
* resource is url
* scheme is "opencontrail"
This driver can be configured via query parameters.
Supported parameters:
* scheme:
The scheme of request url to Opencontrail Analytics endpoint.
(default http)
* username:
This is username used by Opencontrail Analytics.(default None)
* password:
This is password used by Opencontrail Analytics.(default None)
* domain
This is domain used by Opencontrail Analytics.(default None)
* verify_ssl
Specify if the certificate will be checked for https request.
(default false)
e.g.
opencontrail://localhost:8143/?username=admin&password=admin&
scheme=https&domain=&verify_ssl=true
* scheme:
The scheme of request url to Opencontrail Analytics endpoint.
(default http)
* username:
This is username used by Opencontrail Analytics.(default None)
* password:
This is password used by Opencontrail Analytics.(default None)
* domain:
This is domain used by Opencontrail Analytics.(default None)
* verify_ssl:
Specify if the certificate will be checked for https request.
(default false)
e.g.::
opencontrail://localhost:8143/?username=admin&password=admin&
scheme=https&domain=&verify_ssl=true
"""
@staticmethod
def _prepare_cache(endpoint, params, cache):

View File

@ -51,33 +51,36 @@ class OpenDayLightDriver(driver.Driver):
This driver uses resources in "pipeline.yaml".
Resource requires below conditions:
* resource is url
* scheme is "opendaylight"
This driver can be configured via query parameters.
Supported parameters:
* scheme:
The scheme of request url to OpenDaylight REST API endpoint.
(default http)
The scheme of request url to OpenDaylight REST API endpoint.
(default http)
* auth:
Auth strategy of http.
This parameter can be set basic and digest.(default None)
Auth strategy of http.
This parameter can be set basic and digest.(default None)
* user:
This is username that is used by auth.(default None)
This is username that is used by auth.(default None)
* password:
This is password that is used by auth.(default None)
This is password that is used by auth.(default None)
* container_name:
Name of container of OpenDaylight.(default "default")
This parameter allows multiple values.
Name of container of OpenDaylight.(default "default")
This parameter allows multiple values.
e.g.
opendaylight://127.0.0.1:8080/controller/nb/v2?container_name=default&
container_name=egg&auth=basic&user=admin&password=admin&scheme=http
e.g.::
In this case, the driver sends a request to the URL below:
http://127.0.0.1:8080/controller/nb/v2/statistics/default/flow
and
http://127.0.0.1:8080/controller/nb/v2/statistics/egg/flow
opendaylight://127.0.0.1:8080/controller/nb/v2?container_name=default&
container_name=egg&auth=basic&user=admin&password=admin&scheme=http
In this case, the driver sends requests to the URLs below:
http://127.0.0.1:8080/controller/nb/v2/statistics/default/flow
http://127.0.0.1:8080/controller/nb/v2/statistics/egg/flow
"""
@staticmethod
def _prepare_cache(endpoint, params, cache):

View File

@ -26,15 +26,17 @@ Configuration:
In /etc/swift/proxy-server.conf on the main pipeline add "ceilometer" just
before "proxy-server" and add the following filter in the file:
[filter:ceilometer]
use = egg:ceilometer#swift
.. code-block:: python
# Some optional configuration
# this allows publishing additional metadata
metadata_headers = X-TEST
[filter:ceilometer]
use = egg:ceilometer#swift
# Set reseller prefix (defaults to "AUTH_" if not set)
reseller_prefix = AUTH_
# Some optional configuration
# this allows publishing additional metadata
metadata_headers = X-TEST
# Set reseller prefix (defaults to "AUTH_" if not set)
reseller_prefix = AUTH_
"""
from __future__ import absolute_import

View File

@ -34,8 +34,9 @@ class FilePublisher(publisher.PublisherBase):
If a file name and location is not specified, this File Publisher will not
log any meters other than log a warning in Ceilometer log file.
To enable this publisher, add the following section to file
/etc/ceilometer/publisher.yaml or simply add it to an existing pipeline.
To enable this publisher, add the following section to the
/etc/ceilometer/publisher.yaml file or simply add it to an existing
pipeline::
-
name: meter_file

View File

@ -124,14 +124,16 @@ class EventFilter(object):
:param event_type: the name of the event. None for all.
:param message_id: the message_id of the event. None for all.
:param traits_filter: the trait filter dicts, all of which are optional.
This parameter is a list of dictionaries that specify
trait values:
{'key': <key>,
'string': <value>,
'integer': <value>,
'datetime': <value>,
'float': <value>,
'op': <eq, lt, le, ne, gt or ge> }
This parameter is a list of dictionaries that specify trait values:
.. code-block:: python
{'key': <key>,
'string': <value>,
'integer': <value>,
'datetime': <value>,
'float': <value>,
'op': <eq, lt, le, ne, gt or ge> }
"""
def __init__(self, start_time=None, end_time=None, event_type=None,

View File

@ -71,65 +71,88 @@ class Connection(base.Connection):
Collections:
- meter (describes sample actually)
- row-key: consists of reversed timestamp, meter and an md5 of
user+resource+project for purposes of uniqueness
- Column Families:
f: contains the following qualifiers:
-counter_name : <name of counter>
-counter_type : <type of counter>
-counter_unit : <unit of counter>
-counter_volume : <volume of counter>
-message: <raw incoming data>
-message_id: <id of message>
-message_signature: <signature of message>
-resource_metadata: raw metadata for corresponding resource
of the meter
-project_id: <id of project>
-resource_id: <id of resource>
-user_id: <id of user>
-recorded_at: <datetime when sample has been recorded (utc.now)>
-flattened metadata with prefix r_metadata. e.g.
f:r_metadata.display_name or f:r_metadata.tag
-rts: <reversed timestamp of entry>
-timestamp: <meter's timestamp (came from message)>
-source for meter with prefix 's'
- meter (describes sample actually):
- row-key: consists of reversed timestamp, meter and an md5 of
user+resource+project for purposes of uniqueness
- Column Families:
f: contains the following qualifiers:
- counter_name: <name of counter>
- counter_type: <type of counter>
- counter_unit: <unit of counter>
- counter_volume: <volume of counter>
- message: <raw incoming data>
- message_id: <id of message>
- message_signature: <signature of message>
- resource_metadata: raw metadata for corresponding resource
of the meter
- project_id: <id of project>
- resource_id: <id of resource>
- user_id: <id of user>
- recorded_at: <datetime when sample has been recorded (utc.now)>
- flattened metadata with prefix r_metadata. e.g.::
f:r_metadata.display_name or f:r_metadata.tag
- rts: <reversed timestamp of entry>
- timestamp: <meter's timestamp (came from message)>
- source for meter with prefix 's'
- resource:
- resource
- row_key: uuid of resource
- Column Families:
f: contains the following qualifiers:
-resource_metadata: raw metadata for corresponding resource
-project_id: <id of project>
-resource_id: <id of resource>
-user_id: <id of user>
-flattened metadata with prefix r_metadata. e.g.
f:r_metadata.display_name or f:r_metadata.tag
-sources for all corresponding meters with prefix 's'
-all meters for this resource in format
"%s+%s+%s!%s!%s" % (rts, source, counter_name, counter_type,
counter_unit)
- alarm
f: contains the following qualifiers:
- resource_metadata: raw metadata for corresponding resource
- project_id: <id of project>
- resource_id: <id of resource>
- user_id: <id of user>
- flattened metadata with prefix r_metadata. e.g.::
f:r_metadata.display_name or f:r_metadata.tag
- sources for all corresponding meters with prefix 's'
- all meters for this resource in format:
.. code-block:: python
"%s+%s+%s!%s!%s" % (rts, source, counter_name, counter_type,
counter_unit)
- alarm:
- row_key: uuid of alarm
- Column Families:
f: contains the raw incoming alarm data
- alarm_h
f: contains the raw incoming alarm data
- alarm_h:
- row_key: uuid of alarm + "_" + reversed timestamp
- Column Families:
f: raw incoming alarm_history data. Timestamp becomes now()
if not determined
- events
f: raw incoming alarm_history data. Timestamp becomes now()
if not determined
- events:
- row_key: timestamp of event's generation + uuid of event
in format: "%s+%s" % (ts, Event.message_id)
-Column Families:
f: contains the following qualifiers:
-event_type: description of event's type
-timestamp: time stamp of event generation
-all traits for this event in format
"%s+%s" % (trait_name, trait_type)
in format: "%s+%s" % (ts, Event.message_id)
- Column Families:
f: contains the following qualifiers:
- event_type: description of event's type
- timestamp: time stamp of event generation
- all traits for this event in format:
.. code-block:: python
"%s+%s" % (trait_name, trait_type)
"""
CAPABILITIES = utils.update_nested(base.Connection.CAPABILITIES,
@ -212,8 +235,8 @@ class Connection(base.Connection):
.. note::
HBase Thrift does not support authentication and there is no
database name, so we are not looking for these in the url.
HBase Thrift does not support authentication and there is no
database name, so we are not looking for these in the url.
"""
opts = {}
result = network_utils.urlsplit(url)
@ -230,8 +253,9 @@ class Connection(base.Connection):
def update_alarm(self, alarm):
"""Create an alarm.
:param alarm: The alarm to create. It is Alarm object, so we need to
call as_dict()
call as_dict()
"""
_id = alarm.alarm_id
alarm_to_store = serialize_entry(alarm.as_dict())
@ -300,7 +324,7 @@ class Connection(base.Connection):
"""Write the data to the backend storage system.
:param data: a dictionary such as returned by
ceilometer.meter.meter_message_from_counter
ceilometer.meter.meter_message_from_counter
"""
with self.conn_pool.connection() as conn:
resource_table = conn.table(self.RESOURCE_TABLE)
@ -496,9 +520,9 @@ class Connection(base.Connection):
.. note::
Due to HBase limitations the aggregations are implemented
in the driver itself, therefore this method will be quite slow
because of all the Thrift traffic it is going to create.
Due to HBase limitations the aggregations are implemented
in the driver itself, therefore this method will be quite slow
because of all the Thrift traffic it is going to create.
"""
if groupby:
@ -575,6 +599,9 @@ class Connection(base.Connection):
"""Write the events to Hbase.
:param event_models: a list of models.Event objects.
:return problem_events: a list of events that could not be saved in a
(reason, event) tuple. From the reasons that are enumerated in
storage.models.Event only the UNKNOWN_PROBLEM is applicable here.
"""
problem_events = []
@ -607,7 +634,7 @@ class Connection(base.Connection):
"""Return an iterable of models.Event objects.
:param event_filter: storage.EventFilter object, consists of filters
for events that are stored in database.
for events that are stored in database.
"""
q, start, stop = make_events_query_from_filter(event_filter)
with self.conn_pool.connection() as conn:
@ -656,6 +683,7 @@ class Connection(base.Connection):
"""Return a dictionary containing the name and data type of the trait.
Only trait types for the provided event_type are returned.
:param event_type: the type of the Event
"""
@ -809,7 +837,7 @@ class MTable(object):
@staticmethod
def SingleColumnValueFilter(args, rows):
"""This method is called from scan() when 'SingleColumnValueFilter'
is found in the 'filter' argument
is found in the 'filter' argument.
"""
op = args[2]
column = "%s:%s" % (args[0], args[1])
@ -841,9 +869,10 @@ class MTable(object):
"""This is filter for testing "in-memory HBase".
This method is called from scan() when 'ColumnPrefixFilter' is found
in the 'filter' argument
:param args is list of filter arguments, contain prefix of column
:param rows is dict of row prefixes for filtering
in the 'filter' argument.
:param args: a list of filter arguments, contain prefix of column
:param rows: a dict of row prefixes for filtering
"""
value = args[0]
column = 'f:' + value
@ -860,11 +889,12 @@ class MTable(object):
def RowFilter(args, rows):
"""This is filter for testing "in-memory HBase".
This method is called from scan() when 'RowFilter'
is found in the 'filter' argument
:param args is list of filter arguments, it contains operator and
sought string
:param rows is dict of rows which are filtered
This method is called from scan() when 'RowFilter' is found in the
'filter' argument.
:param args: a list of filter arguments, it contains operator and
sought string
:param rows: a dict of rows which are filtered
"""
op = args[0]
value = args[1]
@ -962,10 +992,10 @@ def timestamp(dt, reverse=True):
the 'oldest' entries will be on top of the table or it should be the newest
ones (reversed timestamp case).
:param: dt: datetime which is translated to timestamp
:param: reverse: a boolean parameter for reverse or straight count of
timestamp in milliseconds
:return count or reversed count of milliseconds since start of epoch
:param dt: datetime which is translated to timestamp
:param reverse: a boolean parameter for reverse or straight count of
timestamp in milliseconds
:return: count or reversed count of milliseconds since start of epoch
"""
epoch = datetime.datetime(1970, 1, 1)
td = dt - epoch
@ -1008,7 +1038,8 @@ def make_events_query_from_filter(event_filter):
def make_timestamp_query(func, start=None, start_op=None, end=None,
end_op=None, bounds_only=False, **kwargs):
"""Return a filter start and stop row for filtering and a query
which based on the fact that CF-name is 'rts'
which based on the fact that CF-name is 'rts'.
:param start: Optional start timestamp
:param start_op: Optional start timestamp operator, like gt, ge
:param end: Optional end timestamp
@ -1059,7 +1090,7 @@ def make_query(metaquery=None, trait_query=None, **kwargs):
:param metaquery: optional metaquery dict
:param trait_query: optional boolean, for trait_query from kwargs
:param kwargs: key-value pairs to filter on. Key should be a real
column name in db
column name in db
"""
q = []
res_q = None
@ -1119,7 +1150,7 @@ def _get_meter_columns(metaquery, **kwargs):
:param metaquery: optional metaquery dict
:param kwargs: key-value pairs to filter on. Key should be a real
column name in db
column name in db
"""
columns = ['f:message', 'f:recorded_at']
columns.extend(["f:%s" % k for k, v in kwargs.items() if v])
@ -1133,7 +1164,7 @@ def make_sample_query_from_filter(sample_filter, require_meter=True):
:param sample_filter: SampleFilter instance
:param require_meter: If true and the filter does not have a meter,
raise an error.
raise an error.
"""
meter = sample_filter.meter
@ -1240,9 +1271,8 @@ def deserialize_entry(entry, get_raw_meta=True):
get_raw_meta is False.
:param entry: entry from HBase, without row name and timestamp
:param get_raw_meta: If true then raw metadata will be returned
If False metadata will be constructed from
'f:r_metadata.' fields
:param get_raw_meta: If true then raw metadata will be returned,
if False metadata will be constructed from 'f:r_metadata.' fields
"""
flatten_result = {}
sources = []

View File

@ -654,6 +654,7 @@ class Connection(pymongo_base.Connection):
Pagination works by requiring sort_key and sort_dir.
We use the last item in previous page as the 'marker' for pagination.
So we return values that follow the passed marker in the order.
:param q: the query dict passed in.
:param db_collection: Database collection to be queried.
:param limit: maximum number of items to return.
@ -661,7 +662,8 @@ class Connection(pymongo_base.Connection):
results after this item.
:param sort_keys: array of attributes by which results are sorted.
:param sort_dir: direction in which results are sorted (asc, desc).
return: The query with sorting/pagination added.
:return: The query with sorting/pagination added.
"""
sort_keys = sort_keys or []

View File

@ -51,7 +51,11 @@ def write_autodoc_index():
RSTDIR = os.path.abspath(os.path.join(BASE_DIR, "sourcecode"))
SRCS = {'ceilometer': ROOT}
EXCLUDED_MODULES = ('ceilometer.tests')
EXCLUDED_MODULES = ('ceilometer.tests','ceilometer.compute.nova_notifier',
'ceilometer.openstack.common.db.sqlalchemy.session',
'ceilometer.openstack.common.middleware.audit',
'ceilometer.openstack.common.middleware.notifier',
'ceilometer.openstack.common.log_handler')
CURRENT_SOURCES = {}
if not(os.path.exists(RSTDIR)):