DMTF CADF format

Adding support for the DMTF Cloud Audit (CADF) format which will be
used along with a generic notification filter to audit 'core'
component APIs.

initial code drop

blueprint support-standard-audit-formats

Change-Id: I3b27ceae8faa6427e4be1290c1406102e790e2e3
This commit is contained in:
Gordon Chung
2013-08-06 09:45:23 -04:00
parent dd9bb23917
commit 7f76e5cf7b
28 changed files with 3042 additions and 0 deletions

5
openstack-common.conf Normal file
View File

@@ -0,0 +1,5 @@
[DEFAULT]
module=config.generator
module=jsonutils
module=log
base=pycadf

61
pycadf/attachment.py Normal file
View File

@@ -0,0 +1,61 @@
# -*- encoding: utf-8 -*-
#
# Copyright 2013 IBM Corp.
#
# Author: Matt Rutkowski <mrutkows@us.ibm.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pycadf import cadftype
# Dictionary keys used when (de)serializing a cadf:Attachment.
ATTACHMENT_KEYNAME_TYPEURI = "typeURI"
ATTACHMENT_KEYNAME_CONTENT = "content"
ATTACHMENT_KEYNAME_NAME = "name"

# Complete set of Attachment attribute names; all three must be present
# for a valid Attachment (see Attachment.is_valid).
ATTACHMENT_KEYNAMES = [ATTACHMENT_KEYNAME_TYPEURI,
                       ATTACHMENT_KEYNAME_CONTENT,
                       ATTACHMENT_KEYNAME_NAME]
class Attachment(cadftype.CADFAbstractType):
    """CADF Attachment: a typed name/content pair added to an Event."""

    # TODO(mrutkows): OpenStack / Ceilometer may want to define
    # the set of approved attachment types in order to
    # limit and validate them.
    typeURI = cadftype.ValidatorDescriptor(ATTACHMENT_KEYNAME_TYPEURI,
                                           lambda x: isinstance(x, str))
    content = cadftype.ValidatorDescriptor(ATTACHMENT_KEYNAME_CONTENT)
    name = cadftype.ValidatorDescriptor(ATTACHMENT_KEYNAME_NAME,
                                        lambda x: isinstance(x, str))

    def __init__(self, typeURI=None, content=None, name=None):
        """Assign whichever attributes were supplied; omitted ones stay unset."""
        supplied = ((ATTACHMENT_KEYNAME_TYPEURI, typeURI),
                    (ATTACHMENT_KEYNAME_CONTENT, content),
                    (ATTACHMENT_KEYNAME_NAME, name))
        for keyname, arg in supplied:
            if arg is not None:
                setattr(self, keyname, arg)

    def is_valid(self):
        """Existence test: all three attributes must be set to be valid."""
        return all(hasattr(self, keyname)
                   for keyname in ATTACHMENT_KEYNAMES)

179
pycadf/cadftaxonomy.py Normal file
View File

@@ -0,0 +1,179 @@
# -*- encoding: utf-8 -*-
#
# Copyright 2013 IBM Corp.
#
# Author: Matt Rutkowski <mrutkows@us.ibm.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pycadf import cadftype
# typeURI of the CADF Action taxonomy for this spec version.
TYPE_URI_ACTION = cadftype.CADF_VERSION_1_0_0 + 'action'

# Fallback taxonomy value when a classification cannot be determined.
UNKNOWN = 'unknown'

# Commonly used (valid) Event.action values from Nova
ACTION_CREATE = 'create'
ACTION_READ = 'read'
ACTION_UPDATE = 'update'
ACTION_DELETE = 'delete'

# OpenStack specific, Profile or change CADF spec. to add this action
ACTION_LIST = 'list'

# Closed set of valid Event.action values from the CADF Action taxonomy.
# TODO(mrutkows): Make global using WSGI mechanism
ACTION_TAXONOMY = frozenset([
    'backup',
    'capture',
    ACTION_CREATE,
    'configure',
    ACTION_READ,
    ACTION_LIST,
    ACTION_UPDATE,
    ACTION_DELETE,
    'monitor',
    'start',
    'stop',
    'deploy',
    'undeploy',
    'enable',
    'disable',
    'send',
    'receive',
    'authenticate',
    'authenticate/login',
    'revoke',
    'renew',
    'restore',
    'evaluate',
    'allow',
    'deny',
    'notify',
    UNKNOWN
])


# TODO(mrutkows): validate absolute URIs as well
def is_valid_action(value):
    """Return True if *value* is in the CADF Action taxonomy."""
    return value in ACTION_TAXONOMY
# typeURI of the CADF Outcome taxonomy for this spec version.
TYPE_URI_OUTCOME = cadftype.CADF_VERSION_1_0_0 + 'outcome'

# Valid Event.outcome values
OUTCOME_SUCCESS = 'success'
OUTCOME_FAILURE = 'failure'
OUTCOME_PENDING = 'pending'

# Closed set of valid Event.outcome values.
# TODO(mrutkows): Make global using WSGI mechanism
OUTCOME_TAXONOMY = frozenset([
    OUTCOME_SUCCESS,
    OUTCOME_FAILURE,
    OUTCOME_PENDING,
    UNKNOWN
])


# TODO(mrutkows): validate absolute URIs as well
def is_valid_outcome(value):
    """Return True if *value* is in the CADF Outcome taxonomy."""
    return value in OUTCOME_TAXONOMY
# Frequently referenced resource taxonomy entries, named for reuse.
ACCOUNT_USER = 'service/security/account/user'
CADF_AUDIT_FILTER = 'service/security/audit/filter'

# Closed set of valid resource classifications (Event initiator/target).
# TODO(mrutkows): Make global using WSGI mechanism
RESOURCE_TAXONOMY = frozenset([
    'storage',
    'storage/node',
    'storage/volume',
    'storage/memory',
    'storage/container',
    'storage/directory',
    'storage/database',
    'storage/queue',
    'compute',
    'compute/node',
    'compute/cpu',
    'compute/machine',
    'compute/process',
    'compute/thread',
    'network',
    'network/node',
    'network/node/host',
    'network/connection',
    'network/domain',
    'network/cluster',
    'service',
    'service/oss',
    'service/bss',
    'service/bss/metering',
    'service/composition',
    'service/compute',
    'service/database',
    'service/security',
    'service/security/account',
    ACCOUNT_USER,
    CADF_AUDIT_FILTER,
    'service/storage',
    'service/storage/block',
    'service/storage/image',
    'service/storage/object',
    'service/network',
    'data',
    'data/message',
    'data/workload',
    'data/workload/app',
    'data/workload/service',
    'data/workload/task',
    'data/workload/job',
    'data/file',
    'data/file/catalog',
    'data/file/log',
    'data/template',
    'data/package',
    'data/image',
    'data/module',
    'data/config',
    'data/directory',
    'data/database',
    'data/security',
    'data/security/account',
    'data/security/credential',
    'data/security/group',
    'data/security/identity',
    'data/security/key',
    'data/security/license',
    'data/security/policy',
    'data/security/profile',
    'data/security/role',
    'data/security/service',
    'data/security/account/user',
    'data/security/account/user/privilege',
    'data/database/alias',
    'data/database/catalog',
    'data/database/constraints',
    'data/database/index',
    'data/database/instance',
    'data/database/key',
    'data/database/routine',
    'data/database/schema',
    'data/database/sequence',
    'data/database/table',
    'data/database/trigger',
    'data/database/view',
    UNKNOWN
])


# TODO(mrutkows): validate absolute URIs as well
def is_valid_resource(value):
    """Return True if *value* is in the CADF Resource taxonomy."""
    return value in RESOURCE_TAXONOMY

93
pycadf/cadftype.py Normal file
View File

@@ -0,0 +1,93 @@
# -*- encoding: utf-8 -*-
#
# Copyright © 2013 IBM Corporation
#
# Author: Matt Rutkowski <mrutkows@us.ibm.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
from pycadf.openstack.common import jsonutils
# Prefix for CADF schema names and base URI of the CADF 1.0.0 taxonomies.
CADF_SCHEMA_1_0_0 = 'cadf:'
CADF_VERSION_1_0_0 = 'http://schemas.dmtf.org/cloud/audit/1.0/'

# Valid cadf:Event record "types"
EVENTTYPE_ACTIVITY = 'activity'
EVENTTYPE_MONITOR = 'monitor'
EVENTTYPE_CONTROL = 'control'

VALID_EVENTTYPES = frozenset([
    EVENTTYPE_ACTIVITY,
    EVENTTYPE_MONITOR,
    EVENTTYPE_CONTROL
])


def is_valid_eventType(value):
    """Return True if *value* is one of the recognized CADF event types."""
    return value in VALID_EVENTTYPES
# Valid cadf:Event record "Reporter" roles
REPORTER_ROLE_OBSERVER = 'observer'
REPORTER_ROLE_MODIFIER = 'modifier'
REPORTER_ROLE_RELAY = 'relay'

VALID_REPORTER_ROLES = frozenset([
    REPORTER_ROLE_OBSERVER,
    REPORTER_ROLE_MODIFIER,
    REPORTER_ROLE_RELAY
])


def is_valid_reporter_role(value):
    """Return True if *value* is a recognized Reporterstep role."""
    return value in VALID_REPORTER_ROLES
class ValidatorDescriptor(object):
    """Data descriptor that validates values on attribute assignment.

    Accepted values are stored straight into the owning instance's
    __dict__ under *name*. Assigning None always raises ValueError, as
    does a value rejected by *func* (when one was supplied).
    """

    def __init__(self, name, func=None):
        self.name = name
        self.func = func

    def __set__(self, instance, value):
        # Guard clauses: reject first, store last.
        if value is None:
            raise ValueError('%s must not be None.' % self.name)
        if self.func is not None and not self.func(value):
            raise ValueError('Value failed validation: %s' % self.func)
        instance.__dict__[self.name] = value
class CADFAbstractType(object):
    """The abstract base class for all CADF (complex) data types (classes)."""
    # Python 2 style metaclass declaration (silently ignored on Python 3).
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def is_valid(self, value):
        """Subclasses self-validate their instances against the schema."""
        pass

    def as_dict(self):
        """Return dict representation of Event."""
        # convert_instances=True lets nested CADF objects serialize too.
        return jsonutils.to_primitive(self, convert_instances=True)

    # TODO(mrutkows): Eventually, we want to use the OrderedDict (introduced
    # in Python 2.7) type for all CADF classes to store attributes in a
    # canonical form. Currently, OpenStack/Jenkins requires 2.6 compatibility.
    # The reason is that we want to be able to support signing all or parts
    # of the event record and need to guarantee order.
    # def to_ordered_dict(self, value):
    #     pass

228
pycadf/event.py Normal file
View File

@@ -0,0 +1,228 @@
# -*- encoding: utf-8 -*-
#
# Copyright 2013 IBM Corp.
#
# Author: Matt Rutkowski <mrutkows@us.ibm.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pycadf import attachment
from pycadf import cadftaxonomy
from pycadf import cadftype
from pycadf import identifier
from pycadf import measurement
from pycadf import reason
from pycadf import reporterstep
from pycadf import resource
from pycadf import tag
from pycadf import timestamp
# typeURI of the CADF Event data type for this spec version.
TYPE_URI_EVENT = cadftype.CADF_VERSION_1_0_0 + 'event'

# Dictionary keys used when (de)serializing a cadf:Event record.
EVENT_KEYNAME_TYPEURI = "typeURI"
EVENT_KEYNAME_EVENTTYPE = "eventType"
EVENT_KEYNAME_ID = "id"
EVENT_KEYNAME_EVENTTIME = "eventTime"
EVENT_KEYNAME_INITIATOR = "initiator"
EVENT_KEYNAME_INITIATORID = "initiatorId"
EVENT_KEYNAME_ACTION = "action"
EVENT_KEYNAME_TARGET = "target"
EVENT_KEYNAME_TARGETID = "targetId"
EVENT_KEYNAME_OUTCOME = "outcome"
EVENT_KEYNAME_REASON = "reason"
EVENT_KEYNAME_SEVERITY = "severity"
EVENT_KEYNAME_MEASUREMENTS = "measurements"
EVENT_KEYNAME_TAGS = "tags"
EVENT_KEYNAME_ATTACHMENTS = "attachments"
EVENT_KEYNAME_REPORTERCHAIN = "reporterchain"

# Complete list of Event attribute names (mandatory and optional).
EVENT_KEYNAMES = [EVENT_KEYNAME_TYPEURI,
                  EVENT_KEYNAME_EVENTTYPE,
                  EVENT_KEYNAME_ID,
                  EVENT_KEYNAME_EVENTTIME,
                  EVENT_KEYNAME_INITIATOR,
                  EVENT_KEYNAME_INITIATORID,
                  EVENT_KEYNAME_ACTION,
                  EVENT_KEYNAME_TARGET,
                  EVENT_KEYNAME_TARGETID,
                  EVENT_KEYNAME_OUTCOME,
                  EVENT_KEYNAME_REASON,
                  EVENT_KEYNAME_SEVERITY,
                  EVENT_KEYNAME_MEASUREMENTS,
                  EVENT_KEYNAME_TAGS,
                  EVENT_KEYNAME_ATTACHMENTS,
                  EVENT_KEYNAME_REPORTERCHAIN]
class Event(cadftype.CADFAbstractType):
    """CADF Event record: who did what to which resource, with what result.

    The mandatory attributes (typeURI, eventType, id, eventTime, action,
    outcome, reporterchain) are always set by the constructor; optional
    attributes are validated on assignment via ValidatorDescriptors.
    """

    eventType = cadftype.ValidatorDescriptor(
        EVENT_KEYNAME_EVENTTYPE, lambda x: cadftype.is_valid_eventType(x))
    id = cadftype.ValidatorDescriptor(EVENT_KEYNAME_ID,
                                      lambda x: identifier.is_valid(x))
    eventTime = cadftype.ValidatorDescriptor(EVENT_KEYNAME_EVENTTIME,
                                             lambda x: timestamp.is_valid(x))
    initiator = cadftype.ValidatorDescriptor(
        EVENT_KEYNAME_INITIATOR,
        (lambda x: isinstance(x, resource.Resource) and
         x.is_valid()))
    initiatorId = cadftype.ValidatorDescriptor(
        EVENT_KEYNAME_INITIATORID, lambda x: identifier.is_valid(x))
    action = cadftype.ValidatorDescriptor(
        EVENT_KEYNAME_ACTION, lambda x: cadftaxonomy.is_valid_action(x))
    target = cadftype.ValidatorDescriptor(
        EVENT_KEYNAME_TARGET, (lambda x: isinstance(x, resource.Resource) and
                               x.is_valid()))
    targetId = cadftype.ValidatorDescriptor(
        EVENT_KEYNAME_TARGETID, lambda x: identifier.is_valid(x))
    outcome = cadftype.ValidatorDescriptor(
        EVENT_KEYNAME_OUTCOME, lambda x: cadftaxonomy.is_valid_outcome(x))
    reason = cadftype.ValidatorDescriptor(
        EVENT_KEYNAME_REASON,
        lambda x: isinstance(x, reason.Reason) and x.is_valid())
    severity = cadftype.ValidatorDescriptor(EVENT_KEYNAME_SEVERITY,
                                            lambda x: isinstance(x, str))

    def __init__(self, eventType=cadftype.EVENTTYPE_ACTIVITY,
                 id=None, eventTime=None,
                 action=cadftaxonomy.UNKNOWN, outcome=cadftaxonomy.UNKNOWN,
                 initiator=None, initiatorId=None, target=None, targetId=None,
                 severity=None, reason=None):
        # BUG FIX: 'id' and 'eventTime' now default to None and are
        # generated per call. The previous defaults
        # (id=identifier.generate_uuid(), eventTime=timestamp.get_utc_now())
        # were evaluated only once, at class-definition time, so every
        # Event constructed without explicit values shared a single
        # identifier and a single timestamp.

        # Establish typeURI for the CADF Event data type
        # TODO(mrutkows): support extended typeURIs for Event subtypes
        setattr(self, EVENT_KEYNAME_TYPEURI, TYPE_URI_EVENT)
        # Event.eventType (Mandatory)
        setattr(self, EVENT_KEYNAME_EVENTTYPE, eventType)
        # Event.id (Mandatory): fresh identifier for each event
        setattr(self, EVENT_KEYNAME_ID,
                id if id is not None else identifier.generate_uuid())
        # Event.eventTime (Mandatory): stamped at construction time
        setattr(self, EVENT_KEYNAME_EVENTTIME,
                eventTime if eventTime is not None
                else timestamp.get_utc_now())
        # Event.action (Mandatory)
        setattr(self, EVENT_KEYNAME_ACTION, action)
        # Event.outcome (Mandatory)
        setattr(self, EVENT_KEYNAME_OUTCOME, outcome)
        # Event.initiator (Mandatory if no initiatorId)
        if initiator is not None:
            setattr(self, EVENT_KEYNAME_INITIATOR, initiator)
        # Event.initiatorId (Dependent)
        if initiatorId is not None:
            setattr(self, EVENT_KEYNAME_INITIATORID, initiatorId)
        # Event.target (Mandatory if no targetId)
        if target is not None:
            setattr(self, EVENT_KEYNAME_TARGET, target)
        # Event.targetId (Dependent)
        if targetId is not None:
            setattr(self, EVENT_KEYNAME_TARGETID, targetId)
        # Event.severity (Optional)
        if severity is not None:
            setattr(self, EVENT_KEYNAME_SEVERITY, severity)
        # Event.reason (Optional)
        if reason is not None:
            setattr(self, EVENT_KEYNAME_REASON, reason)
        # Event.reporterchain (Mandatory): at least one cadf:Reporterstep
        # entry is required, so prepare an (initially empty) list for them.
        setattr(self, EVENT_KEYNAME_REPORTERCHAIN, list())

    def add_reporterstep(self, step):
        """Append a valid Reporterstep to the reporterchain.

        :raises ValueError: if *step* is not a valid Reporterstep
        """
        if step is not None and isinstance(step, reporterstep.Reporterstep):
            if step.is_valid():
                reporterchain = getattr(self,
                                        EVENT_KEYNAME_REPORTERCHAIN)
                reporterchain.append(step)
            else:
                raise ValueError('Invalid reporterstep')
        else:
            raise ValueError('Invalid reporterstep. '
                             'Value must be a Reporterstep')

    def add_measurement(self, measure_val):
        """Append a valid Measurement, creating the list on first use.

        :raises ValueError: if *measure_val* is not a valid Measurement
        """
        if (measure_val is not None
                and isinstance(measure_val, measurement.Measurement)):
            if measure_val.is_valid():
                # Create the list of event.Measurements if needed
                if not hasattr(self, EVENT_KEYNAME_MEASUREMENTS):
                    setattr(self, EVENT_KEYNAME_MEASUREMENTS, list())
                measurements = getattr(self, EVENT_KEYNAME_MEASUREMENTS)
                measurements.append(measure_val)
            else:
                raise ValueError('Invalid measurement')
        else:
            raise ValueError('Invalid measurement. '
                             'Value must be a Measurement')

    def add_tag(self, tag_val):
        """Append a valid tag string, creating the list on first use.

        :raises ValueError: if *tag_val* is not a valid tag
        """
        if tag.is_valid(tag_val):
            if not hasattr(self, EVENT_KEYNAME_TAGS):
                setattr(self, EVENT_KEYNAME_TAGS, list())
            getattr(self, EVENT_KEYNAME_TAGS).append(tag_val)
        else:
            raise ValueError('Invalid tag')

    def add_attachment(self, attachment_val):
        """Append a valid Attachment, creating the list on first use.

        :raises ValueError: if *attachment_val* is not a valid Attachment
        """
        if (attachment_val is not None
                and isinstance(attachment_val, attachment.Attachment)):
            if attachment_val.is_valid():
                # Create the list of Attachments if needed
                if not hasattr(self, EVENT_KEYNAME_ATTACHMENTS):
                    setattr(self, EVENT_KEYNAME_ATTACHMENTS, list())
                attachments = getattr(self, EVENT_KEYNAME_ATTACHMENTS)
                attachments.append(attachment_val)
            else:
                raise ValueError('Invalid attachment')
        else:
            raise ValueError('Invalid attachment. '
                             'Value must be an Attachment')

    def is_valid(self):
        """Existence test of all mandatory attributes.

        # TODO(mrutkows): Eventually, make sure all attributes are
        # from either the CADF spec. (or profiles thereof)
        # TODO(mrutkows): validate all child attributes that are CADF types
        # TODO(mrutkows): Cannot have both an initiator and initiatorId
        # TODO(mrutkows): Cannot have both an target and targetId
        """
        return (
            hasattr(self, EVENT_KEYNAME_TYPEURI) and
            hasattr(self, EVENT_KEYNAME_EVENTTYPE) and
            hasattr(self, EVENT_KEYNAME_ID) and
            hasattr(self, EVENT_KEYNAME_EVENTTIME) and
            hasattr(self, EVENT_KEYNAME_ACTION) and
            hasattr(self, EVENT_KEYNAME_OUTCOME) and
            hasattr(self, EVENT_KEYNAME_INITIATOR) and
            hasattr(self, EVENT_KEYNAME_TARGET) and
            hasattr(self, EVENT_KEYNAME_REPORTERCHAIN)
        )

58
pycadf/eventfactory.py Normal file
View File

@@ -0,0 +1,58 @@
# -*- encoding: utf-8 -*-
#
# Copyright 2013 IBM Corp.
#
# Author: Matt Rutkowski <mrutkows@us.ibm.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pycadf import cadftype
from pycadf import event
ERROR_UNKNOWN_EVENTTYPE = 'Unknown CADF EventType requested on factory method'


class EventFactory(object):
    """Factory class to create different required attributes for
    the following CADF event types:
    'activity': for tracking any interesting system activities for audit
    'monitor': Events that carry Metrics and Measurements and support
               standards such as NIST
    'control': For audit events that are based upon (security) policies
               and reflect some policy decision.
    """

    def new_event(self, eventType=cadftype.EVENTTYPE_ACTIVITY, **kwargs):
        """Create and return a new Event of the requested type.

        :param eventType: one of cadftype.VALID_EVENTTYPES
        :param kwargs: forwarded verbatim to the event.Event constructor
        :raises ValueError: if eventType is not a valid CADF event type
        """
        # Fail fast: reject an unknown event type before doing the work
        # of constructing an Event (previously the Event was built first
        # and then discarded when validation failed).
        if not cadftype.is_valid_eventType(eventType):
            raise ValueError(ERROR_UNKNOWN_EVENTTYPE)

        # For now, construct a base ('activity') event as the default.
        event_val = event.Event(**kwargs)
        event_val.eventType = eventType

        # TODO(mrutkows): CADF is only being used for basic 'activity'
        # auditing (on APIs); an if/elif dispatch will become meaningful
        # once 'monitor' and 'control' subclasses of Event exist.
        # elif eventType == cadftype.EVENTTYPE_MONITOR:
        #     event_val.set_eventType(cadftype.EVENTTYPE_MONITOR)
        # elif eventType == cadftype.EVENTTYPE_CONTROL:
        #     event_val.set_eventType(cadftype.EVENTTYPE_CONTROL)
        return event_val

118
pycadf/geolocation.py Normal file
View File

@@ -0,0 +1,118 @@
# -*- encoding: utf-8 -*-
#
# Copyright 2013 IBM Corp.
#
# Author: Matt Rutkowski <mrutkows@us.ibm.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pycadf import cadftype
from pycadf import identifier
# Geolocation types can appear outside a cadf:Event record context, in these
# cases a typeURI may be used to identify the cadf:Geolocation data type.
TYPE_URI_GEOLOCATION = cadftype.CADF_VERSION_1_0_0 + 'geolocation'

# Dictionary keys used when (de)serializing a cadf:Geolocation.
GEO_KEYNAME_ID = "id"
GEO_KEYNAME_LATITUDE = "latitude"
GEO_KEYNAME_LONGITUDE = "longitude"
GEO_KEYNAME_ELEVATION = "elevation"
GEO_KEYNAME_ACCURACY = "accuracy"
GEO_KEYNAME_CITY = "city"
GEO_KEYNAME_STATE = "state"
GEO_KEYNAME_REGIONICANN = "regionICANN"
#GEO_KEYNAME_ANNOTATIONS = "annotations"

# Complete set of Geolocation attribute names; all must be present for
# a valid Geolocation (see Geolocation.is_valid).
GEO_KEYNAMES = [GEO_KEYNAME_ID,
                GEO_KEYNAME_LATITUDE,
                GEO_KEYNAME_LONGITUDE,
                GEO_KEYNAME_ELEVATION,
                GEO_KEYNAME_ACCURACY,
                GEO_KEYNAME_CITY,
                GEO_KEYNAME_STATE,
                GEO_KEYNAME_REGIONICANN
                #GEO_KEYNAME_ANNOTATIONS
                ]
class Geolocation(cadftype.CADFAbstractType):
    """CADF Geolocation: physical location information for a resource."""

    id = cadftype.ValidatorDescriptor(GEO_KEYNAME_ID,
                                      lambda x: identifier.is_valid(x))
    # TODO(mrutkows): we may want to do more validation to make
    # sure numeric range represented by string is valid
    latitude = cadftype.ValidatorDescriptor(GEO_KEYNAME_LATITUDE,
                                            lambda x: isinstance(x, str))
    longitude = cadftype.ValidatorDescriptor(GEO_KEYNAME_LONGITUDE,
                                             lambda x: isinstance(x, str))
    elevation = cadftype.ValidatorDescriptor(GEO_KEYNAME_ELEVATION,
                                             lambda x: isinstance(x, str))
    accuracy = cadftype.ValidatorDescriptor(GEO_KEYNAME_ACCURACY,
                                            lambda x: isinstance(x, str))
    city = cadftype.ValidatorDescriptor(GEO_KEYNAME_CITY,
                                        lambda x: isinstance(x, str))
    state = cadftype.ValidatorDescriptor(GEO_KEYNAME_STATE,
                                         lambda x: isinstance(x, str))
    regionICANN = cadftype.ValidatorDescriptor(GEO_KEYNAME_REGIONICANN,
                                               lambda x: isinstance(x, str))

    def __init__(self, id=None, latitude=None, longitude=None,
                 elevation=None, accuracy=None, city=None, state=None,
                 regionICANN=None):
        """Set whichever geolocation attributes were supplied (non-None)."""
        supplied = ((GEO_KEYNAME_ID, id),
                    (GEO_KEYNAME_LATITUDE, latitude),
                    (GEO_KEYNAME_LONGITUDE, longitude),
                    (GEO_KEYNAME_ELEVATION, elevation),
                    (GEO_KEYNAME_ACCURACY, accuracy),
                    (GEO_KEYNAME_CITY, city),
                    (GEO_KEYNAME_STATE, state),
                    (GEO_KEYNAME_REGIONICANN, regionICANN))
        for keyname, arg in supplied:
            if arg is not None:
                setattr(self, keyname, arg)

    # TODO(mrutkows): add mechanism for annotations; OpenStack may choose
    # not to support this "extension mechanism" (it is not required and
    # not critical in many audit contexts).
    def set_annotations(self, value):
        raise NotImplementedError()
        # setattr(self, GEO_KEYNAME_ANNOTATIONS, value)

    def is_valid(self):
        """Existence test: every attribute in GEO_KEYNAMES must be set."""
        # TODO(mrutkows): validate specific attribute type/format
        return all(hasattr(self, attr) for attr in GEO_KEYNAMES)

42
pycadf/identifier.py Normal file
View File

@@ -0,0 +1,42 @@
# -*- encoding: utf-8 -*-
#
# Copyright 2013 IBM Corp.
#
# Author: Matt Rutkowski <mrutkows@us.ibm.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from pycadf import cadftype
from pycadf import timestamp
# TODO(mrutkows): Add openstack namespace prefix (e.g. 'openstack:') to all
# cadf:Identifiers
# TODO(mrutkows): make the namespace prefix configurable and have it resolve to
# a full openstack namespace/domain value via some declaration (e.g.
# "openstack:" == "http:\\www.openstack.org\")...
def generate_uuid():
    """Return a new CADF identifier: a version-5 UUID string.

    The UUID name is the CADF version URI concatenated with the current
    UTC timestamp, so successive calls yield distinct identifiers.
    """
    seed = cadftype.CADF_VERSION_1_0_0 + timestamp.get_utc_now()
    return str(uuid.uuid5(uuid.NAMESPACE_DNS, seed))
# TODO(mrutkows): validate any cadf:Identifier (type) record against
# CADF schema. This would include schema validation as an optional parm.
def is_valid(value):
    """Validate a CADF identifier value.

    Returns True for strings; raises TypeError for anything else
    (the descriptor machinery treats any exception as a rejection).
    """
    if isinstance(value, str):
        return True
    raise TypeError

67
pycadf/measurement.py Normal file
View File

@@ -0,0 +1,67 @@
# -*- encoding: utf-8 -*-
#
# Copyright 2013 IBM Corp.
#
# Author: Matt Rutkowski <mrutkows@us.ibm.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pycadf import cadftype
from pycadf import identifier
from pycadf import metric
from pycadf import resource
# Dictionary keys used when (de)serializing a cadf:Measurement.
MEASUREMENT_KEYNAME_RESULT = "result"
MEASUREMENT_KEYNAME_METRIC = "metric"
MEASUREMENT_KEYNAME_METRICID = "metricId"
MEASUREMENT_KEYNAME_CALCBY = "calculatedBy"

# Complete set of Measurement attribute names.
MEASUREMENT_KEYNAMES = [MEASUREMENT_KEYNAME_RESULT,
                        MEASUREMENT_KEYNAME_METRICID,
                        MEASUREMENT_KEYNAME_METRIC,
                        MEASUREMENT_KEYNAME_CALCBY]
class Measurement(cadftype.CADFAbstractType):
    """CADF Measurement: a result plus the metric that produced it."""

    result = cadftype.ValidatorDescriptor(MEASUREMENT_KEYNAME_RESULT)
    metric = cadftype.ValidatorDescriptor(
        MEASUREMENT_KEYNAME_METRIC, lambda x: isinstance(x, metric.Metric))
    metricId = cadftype.ValidatorDescriptor(MEASUREMENT_KEYNAME_METRICID,
                                            lambda x: identifier.is_valid(x))
    calculatedBy = cadftype.ValidatorDescriptor(
        MEASUREMENT_KEYNAME_CALCBY,
        (lambda x: isinstance(x, resource.Resource) and x.is_valid()))

    def __init__(self, result=None, metric=None, metricId=None,
                 calculatedBy=None):
        """Set whichever measurement attributes were supplied (non-None)."""
        supplied = ((MEASUREMENT_KEYNAME_RESULT, result),
                    (MEASUREMENT_KEYNAME_METRICID, metricId),
                    (MEASUREMENT_KEYNAME_METRIC, metric),
                    (MEASUREMENT_KEYNAME_CALCBY, calculatedBy))
        for keyname, arg in supplied:
            if arg is not None:
                setattr(self, keyname, arg)

    def is_valid(self):
        """Existence test: a Measurement is valid once it holds a result."""
        return hasattr(self, MEASUREMENT_KEYNAME_RESULT)

73
pycadf/metric.py Normal file
View File

@@ -0,0 +1,73 @@
# -*- encoding: utf-8 -*-
#
# Copyright 2013 IBM Corp.
#
# Author: Matt Rutkowski <mrutkows@us.ibm.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pycadf import cadftype
from pycadf import identifier
# Metric types can appear outside a cadf:Event record context, in these cases
# a typeURI may be used to identify the cadf:Metric data type.
TYPE_URI_METRIC = cadftype.CADF_VERSION_1_0_0 + 'metric'

# Dictionary keys used when (de)serializing a cadf:Metric.
METRIC_KEYNAME_METRICID = "metricId"
METRIC_KEYNAME_UNIT = "unit"
METRIC_KEYNAME_NAME = "name"
#METRIC_KEYNAME_ANNOTATIONS = "annotations"

# Complete set of Metric attribute names.
METRIC_KEYNAMES = [METRIC_KEYNAME_METRICID,
                   METRIC_KEYNAME_UNIT,
                   METRIC_KEYNAME_NAME
                   #METRIC_KEYNAME_ANNOTATIONS
                   ]
class Metric(cadftype.CADFAbstractType):
    """CADF Metric: identifies the unit and name behind a Measurement."""

    metricId = cadftype.ValidatorDescriptor(METRIC_KEYNAME_METRICID,
                                            lambda x: identifier.is_valid(x))
    unit = cadftype.ValidatorDescriptor(METRIC_KEYNAME_UNIT,
                                        lambda x: isinstance(x, str))
    name = cadftype.ValidatorDescriptor(METRIC_KEYNAME_NAME,
                                        lambda x: isinstance(x, str))

    def __init__(self, metricId=None, unit=None, name=None):
        """Construct a Metric.

        :param metricId: identifier for this metric; a fresh UUID is
            generated per instance when omitted.
        :param unit: measurement unit (optional here; required by is_valid)
        :param name: human-readable metric name (optional)
        """
        # BUG FIX: the previous default (metricId=identifier.generate_uuid())
        # was evaluated only once, at class-definition time, so every Metric
        # created without an explicit id shared the same identifier.
        setattr(self, METRIC_KEYNAME_METRICID,
                metricId if metricId is not None
                else identifier.generate_uuid())
        # Metric.unit
        if unit is not None:
            setattr(self, METRIC_KEYNAME_UNIT, unit)
        # Metric.name
        if name is not None:
            setattr(self, METRIC_KEYNAME_NAME, name)

    # TODO(mrutkows): add mechanism for annotations; OpenStack may choose
    # not to support this "extension mechanism" (it is not required and
    # not critical in many audit contexts).
    def set_annotations(self, value):
        raise NotImplementedError()
        # setattr(self, METRIC_KEYNAME_ANNOTATIONS, value)

    def is_valid(self):
        """Existence test: metricId and unit attributes must both exist."""
        return (
            hasattr(self, METRIC_KEYNAME_METRICID) and
            hasattr(self, METRIC_KEYNAME_UNIT)
        )

View File

View File

View File

@@ -0,0 +1,256 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 SINA Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Extracts OpenStack config option info from module(s)."""
from __future__ import print_function
import imp
import os
import re
import socket
import sys
import textwrap
from oslo.config import cfg
from pycadf.openstack.common import gettextutils
from pycadf.openstack.common import importutils
gettextutils.install('pycadf')
# Class names of the oslo.config option types we know how to describe.
STROPT = "StrOpt"
BOOLOPT = "BoolOpt"
INTOPT = "IntOpt"
FLOATOPT = "FloatOpt"
LISTOPT = "ListOpt"
MULTISTROPT = "MultiStrOpt"

# Human-readable description printed next to each option type.
OPT_TYPES = {
    STROPT: 'string value',
    BOOLOPT: 'boolean value',
    INTOPT: 'integer value',
    FLOATOPT: 'floating point value',
    LISTOPT: 'list value',
    MULTISTROPT: 'multi valued',
}

# Matches the option-type class name inside repr(type(opt)).
OPTION_REGEX = re.compile(r"(%s)" % "|".join([STROPT, BOOLOPT, INTOPT,
                                              FLOATOPT, LISTOPT,
                                              MULTISTROPT]))

PY_EXT = ".py"
# Repository root, relative to this generator module's location.
BASEDIR = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                       "../../../../"))
# Column at which generated help text is wrapped.
WORDWRAP_WIDTH = 60
def generate(srcfiles):
    """Print a sample config file for the options found in *srcfiles*."""
    mods_by_pkg = dict()
    for filepath in srcfiles:
        pkg_name = filepath.split(os.sep)[1]
        mod_str = '.'.join(['.'.join(filepath.split(os.sep)[:-1]),
                            os.path.basename(filepath).split('.')[0]])
        mods_by_pkg.setdefault(pkg_name, list()).append(mod_str)
    # NOTE(lzyeval): place top level modules before packages
    # BUG FIX: the original used filter(...).sort(); on Python 3 filter()
    # returns an iterator, which has no .sort() method. sorted() works on
    # both Python 2 and 3.
    pkg_names = sorted(x for x in mods_by_pkg.keys() if x.endswith(PY_EXT))
    ext_names = sorted(x for x in mods_by_pkg.keys() if x not in pkg_names)
    pkg_names.extend(ext_names)

    # opts_by_group is a mapping of group name to an options list
    # The options list is a list of (module, options) tuples
    opts_by_group = {'DEFAULT': []}

    for pkg_name in pkg_names:
        mods = mods_by_pkg.get(pkg_name)
        mods.sort()
        for mod_str in mods:
            if mod_str.endswith('.__init__'):
                mod_str = mod_str[:mod_str.rfind(".")]
            mod_obj = _import_module(mod_str)
            if not mod_obj:
                # Unimportable modules are skipped (already reported).
                continue
            for group, opts in _list_opts(mod_obj):
                opts_by_group.setdefault(group, []).append((mod_str, opts))

    print_group_opts('DEFAULT', opts_by_group.pop('DEFAULT', []))
    for group, opts in opts_by_group.items():
        print_group_opts(group, opts)
def _import_module(mod_str):
    """Import *mod_str* and return the module object, or None on failure.

    Modules under 'bin.' are loaded from source, since scripts in bin/
    are not importable as packages.
    """
    try:
        if mod_str.startswith('bin.'):
            imp.load_source(mod_str[4:], os.path.join('bin', mod_str[4:]))
            return sys.modules[mod_str[4:]]
        else:
            return importutils.import_module(mod_str)
    except ImportError as ie:
        sys.stderr.write("%s\n" % str(ie))
        return None
    except Exception:
        # NOTE(review): deliberately best-effort — any other failure while
        # importing a candidate module is silently skipped so that config
        # generation can continue with the remaining modules.
        return None
def _is_in_group(opt, group):
"Check if opt is in group."
for key, value in group._opts.items():
if value['opt'] == opt:
return True
return False
def _guess_groups(opt, mod_obj):
    """Return the name of the option group that *opt* is registered in."""
    # The DEFAULT group is checked first.
    if _is_in_group(opt, cfg.CONF):
        return 'DEFAULT'

    # Otherwise scan every registered group attribute on the config object.
    for _name, attr in cfg.CONF.items():
        if (isinstance(attr, cfg.CONF.GroupAttr)
                and _is_in_group(opt, attr._group)):
            return attr._group.name

    raise RuntimeError(
        "Unable to find group for option %s, "
        "maybe it's defined twice in the same group?"
        % opt.name
    )
def _list_opts(obj):
    """Return (group_name, [opts]) pairs for every cfg.Opt found on *obj*."""
    def is_opt(candidate):
        return (isinstance(candidate, cfg.Opt) and
                not isinstance(candidate, cfg.SubCommandOpt))

    collected = list()
    for attr_str in dir(obj):
        attr_obj = getattr(obj, attr_str)
        if is_opt(attr_obj):
            collected.append(attr_obj)
        elif (isinstance(attr_obj, list) and
              all(is_opt(item) for item in attr_obj)):
            # A module-level list of options counts too.
            collected.extend(attr_obj)

    grouped = {}
    for opt in collected:
        grouped.setdefault(_guess_groups(opt, obj), []).append(opt)
    return grouped.items()
def print_group_opts(group, opts_by_module):
    """Emit one [group] sample-config section with each module's options."""
    print("[%s]" % group)
    print('')
    for mod_name, mod_opts in opts_by_module:
        # Banner identifying where the following options were defined.
        for banner_line in ('#', '# Options defined in %s' % mod_name, '#',
                            ''):
            print(banner_line)
        for opt in mod_opts:
            _print_opt(opt)
        print('')
def _get_my_ip():
try:
csock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
csock.connect(('8.8.8.8', 80))
(addr, port) = csock.getsockname()
csock.close()
return addr
except socket.error:
return None
def _sanitize_default(name, value):
    """Set up a reasonably sensible default for pybasedir, my_ip and host."""
    if value.startswith(sys.prefix):
        # NOTE(jd) Don't use os.path.join, because it is likely to think the
        # second part is an absolute pathname and therefore drop the first
        # part.
        return os.path.normpath("/usr/" + value[len(sys.prefix):])
    if value.startswith(BASEDIR):
        return value.replace(BASEDIR, '/usr/lib/python/site-packages')
    if BASEDIR in value:
        return value.replace(BASEDIR, '')
    if value == _get_my_ip():
        return '10.0.0.1'
    if value == socket.gethostname() and 'host' in name:
        return 'pycadf'
    if value.strip() != value:
        # Quote values with significant leading/trailing whitespace.
        return '"%s"' % value
    return value
def _print_opt(opt):
    """Print one option as commented-out sample-config lines.

    Emits the wrapped help text followed by '#name=value' lines whose
    formatting depends on the concrete cfg option type.  Exits the
    process on an unrecognized option type or a bad default.
    """
    opt_name, opt_default, opt_help = opt.dest, opt.default, opt.help
    if not opt_help:
        sys.stderr.write('WARNING: "%s" is missing help string.\n' % opt_name)
        opt_help = ""
    opt_type = None
    try:
        # The type name (e.g. 'StrOpt') is extracted from repr(type(opt)).
        opt_type = OPTION_REGEX.search(str(type(opt))).group(0)
    except (ValueError, AttributeError) as err:
        sys.stderr.write("%s\n" % str(err))
        sys.exit(1)
    opt_help += ' (' + OPT_TYPES[opt_type] + ')'
    print('#', "\n# ".join(textwrap.wrap(opt_help, WORDWRAP_WIDTH)))
    try:
        if opt_default is None:
            print('#%s=<None>' % opt_name)
        elif opt_type == STROPT:
            assert(isinstance(opt_default, basestring))
            print('#%s=%s' % (opt_name, _sanitize_default(opt_name,
                                                          opt_default)))
        elif opt_type == BOOLOPT:
            assert(isinstance(opt_default, bool))
            # Booleans are rendered lowercase (true/false).
            print('#%s=%s' % (opt_name, str(opt_default).lower()))
        elif opt_type == INTOPT:
            assert(isinstance(opt_default, int) and
                   not isinstance(opt_default, bool))
            print('#%s=%s' % (opt_name, opt_default))
        elif opt_type == FLOATOPT:
            assert(isinstance(opt_default, float))
            print('#%s=%s' % (opt_name, opt_default))
        elif opt_type == LISTOPT:
            assert(isinstance(opt_default, list))
            print('#%s=%s' % (opt_name, ','.join(opt_default)))
        elif opt_type == MULTISTROPT:
            assert(isinstance(opt_default, list))
            # A MultiStrOpt with no defaults still gets one empty line.
            if not opt_default:
                opt_default = ['']
            for default in opt_default:
                print('#%s=%s' % (opt_name, default))
        print('')
    except Exception:
        sys.stderr.write('Error in option "%s"\n' % opt_name)
        sys.exit(1)
def main():
    """Command-line entry point: build a sample config from source files."""
    args = sys.argv[1:]
    if not args:
        print("usage: %s [srcfile]...\n" % sys.argv[0])
        sys.exit(0)
    generate(args)
if __name__ == '__main__':
    main()

View File

@@ -0,0 +1,305 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Red Hat, Inc.
# Copyright 2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
gettext for openstack-common modules.
Usual usage in an openstack.common module:
from pycadf.openstack.common.gettextutils import _
"""
import copy
import gettext
import logging.handlers
import os
import re
import UserString
from babel import localedata
import six
# Locale directory override (PYCADF_LOCALEDIR) and the default translation
# catalogue for the 'pycadf' domain; falls back to null translation.
_localedir = os.environ.get('pycadf'.upper() + '_LOCALEDIR')
_t = gettext.translation('pycadf', localedir=_localedir, fallback=True)
# Module-level cache of languages with available translations.
_AVAILABLE_LANGUAGES = []
def _(msg):
    """Translate *msg* using the default 'pycadf' catalogue.

    Uses ugettext, the Python 2 unicode-returning gettext API.
    """
    return _t.ugettext(msg)
def install(domain, lazy=False):
    """Install a _() function using the given translation domain.
    Given a translation domain, install a _() function using gettext's
    install() function.
    The main difference from gettext.install() is that we allow
    overriding the default localedir (e.g. /usr/share/locale) using
    a translation-domain-specific environment variable (e.g.
    NOVA_LOCALEDIR).
    :param domain: the translation domain
    :param lazy: indicates whether or not to install the lazy _() function.
                 The lazy _() introduces a way to do deferred translation
                 of messages by installing a _ that builds Message objects,
                 instead of strings, which can then be lazily translated into
                 any available locale.
    """
    if lazy:
        # NOTE(mrodden): Lazy gettext functionality.
        #
        # The following introduces a deferred way to do translations on
        # messages in OpenStack. We override the standard _() function
        # and % (format string) operation to build Message objects that can
        # later be translated when we have more information.
        #
        # Also included below is an example LocaleHandler that translates
        # Messages to an associated locale, effectively allowing many logs,
        # each with their own locale.
        def _lazy_gettext(msg):
            """Create and return a Message object.
            Lazy gettext function for a given domain, it is a factory method
            for a project/module to get a lazy gettext function for its own
            translation domain (i.e. nova, glance, cinder, etc.)
            Message encapsulates a string so that we can translate
            it later when needed.
            """
            return Message(msg, domain)
        # Publish _ as a builtin so every module sees the lazy version
        # (Python 2 builtins module).
        import __builtin__
        __builtin__.__dict__['_'] = _lazy_gettext
    else:
        # Non-lazy path: defer to gettext.install() with a per-domain
        # localedir override taken from the environment.
        localedir = '%s_LOCALEDIR' % domain.upper()
        gettext.install(domain,
                        localedir=os.environ.get(localedir),
                        unicode=True)
class Message(UserString.UserString, object):
    """Class used to encapsulate translatable messages.

    Stores the gettext msgid plus any formatting parameters and string
    concatenations applied to it, so the message can be re-rendered in
    any locale later (see the ``data`` property).  Python 2 only: relies
    on UserString, unicode and ugettext.
    """
    def __init__(self, msg, domain):
        # _msg is the gettext msgid and should never change
        self._msg = msg
        self._left_extra_msg = ''
        self._right_extra_msg = ''
        self.params = None
        self.locale = None
        self.domain = domain
    @property
    def data(self):
        # NOTE(mrodden): this should always resolve to a unicode string
        # that best represents the state of the message currently
        localedir = os.environ.get(self.domain.upper() + '_LOCALEDIR')
        if self.locale:
            # An explicit locale was attached (e.g. by LocaleHandler).
            lang = gettext.translation(self.domain,
                                       localedir=localedir,
                                       languages=[self.locale],
                                       fallback=True)
        else:
            # use system locale for translations
            lang = gettext.translation(self.domain,
                                       localedir=localedir,
                                       fallback=True)
        # Re-apply any text concatenated around the translated msgid.
        full_msg = (self._left_extra_msg +
                    lang.ugettext(self._msg) +
                    self._right_extra_msg)
        if self.params is not None:
            full_msg = full_msg % self.params
        return six.text_type(full_msg)
    def _save_dictionary_parameter(self, dict_param):
        """Deep-copy only the dict keys the format string actually uses."""
        full_msg = self.data
        # look for %(blah) fields in string;
        # ignore %% and deal with the
        # case where % is first character on the line
        keys = re.findall('(?:[^%]|^)%\((\w*)\)[a-z]', full_msg)
        # if we don't find any %(blah) blocks but have a %s
        if not keys and re.findall('(?:[^%]|^)%[a-z]', full_msg):
            # apparently the full dictionary is the parameter
            params = copy.deepcopy(dict_param)
        else:
            params = {}
            for key in keys:
                try:
                    params[key] = copy.deepcopy(dict_param[key])
                except TypeError:
                    # cast uncopyable thing to unicode string
                    params[key] = unicode(dict_param[key])
        return params
    def _save_parameters(self, other):
        """Snapshot the %-operands so the message can be re-rendered later."""
        # we check for None later to see if
        # we actually have parameters to inject,
        # so encapsulate if our parameter is actually None
        if other is None:
            self.params = (other, )
        elif isinstance(other, dict):
            self.params = self._save_dictionary_parameter(other)
        else:
            # fallback to casting to unicode,
            # this will handle the problematic python code-like
            # objects that cannot be deep-copied
            try:
                self.params = copy.deepcopy(other)
            except TypeError:
                self.params = unicode(other)
        return self
    # overrides to be more string-like
    def __unicode__(self):
        return self.data
    def __str__(self):
        return self.data.encode('utf-8')
    def __getstate__(self):
        # Pickle support: deep-copy the translatable state only.
        to_copy = ['_msg', '_right_extra_msg', '_left_extra_msg',
                   'domain', 'params', 'locale']
        new_dict = self.__dict__.fromkeys(to_copy)
        for attr in to_copy:
            new_dict[attr] = copy.deepcopy(self.__dict__[attr])
        return new_dict
    def __setstate__(self, state):
        for (k, v) in state.items():
            setattr(self, k, v)
    # operator overloads
    def __add__(self, other):
        copied = copy.deepcopy(self)
        copied._right_extra_msg += other.__str__()
        return copied
    def __radd__(self, other):
        copied = copy.deepcopy(self)
        copied._left_extra_msg += other.__str__()
        return copied
    def __mod__(self, other):
        # do a format string to catch and raise
        # any possible KeyErrors from missing parameters
        self.data % other
        copied = copy.deepcopy(self)
        return copied._save_parameters(other)
    def __mul__(self, other):
        return self.data * other
    def __rmul__(self, other):
        return other * self.data
    def __getitem__(self, key):
        return self.data[key]
    def __getslice__(self, start, end):
        return self.data.__getslice__(start, end)
    def __getattribute__(self, name):
        # NOTE(mrodden): handle lossy operations that we can't deal with yet
        # These override the UserString implementation, since UserString
        # uses our __class__ attribute to try and build a new message
        # after running the inner data string through the operation.
        # At that point, we have lost the gettext message id and can just
        # safely resolve to a string instead.
        ops = ['capitalize', 'center', 'decode', 'encode',
               'expandtabs', 'ljust', 'lstrip', 'replace', 'rjust', 'rstrip',
               'strip', 'swapcase', 'title', 'translate', 'upper', 'zfill']
        if name in ops:
            return getattr(self.data, name)
        else:
            return UserString.UserString.__getattribute__(self, name)
def get_available_languages(domain):
    """Lists the available languages for the given translation domain.
    :param domain: the domain to get languages for
    """
    # Results are cached module-wide after the first call.
    if _AVAILABLE_LANGUAGES:
        return _AVAILABLE_LANGUAGES
    localedir = '%s_LOCALEDIR' % domain.upper()
    find = lambda x: gettext.find(domain,
                                  localedir=os.environ.get(localedir),
                                  languages=[x])
    # NOTE(mrodden): en_US should always be available (and first in case
    # order matters) since our in-line message strings are en_US
    _AVAILABLE_LANGUAGES.append('en_US')
    # NOTE(luisg): Babel <1.0 used a function called list(), which was
    # renamed to locale_identifiers() in >=1.0, the requirements master list
    # requires >=0.9.6, uncapped, so defensively work with both. We can remove
    # this check when the master list updates to >=1.0, and all projects update
    list_identifiers = (getattr(localedata, 'list', None) or
                        getattr(localedata, 'locale_identifiers'))
    locale_identifiers = list_identifiers()
    for i in locale_identifiers:
        if find(i) is not None:
            _AVAILABLE_LANGUAGES.append(i)
    return _AVAILABLE_LANGUAGES
def get_localized_message(message, user_locale):
    """Gets a localized version of the given message in the given locale."""
    # Plain strings pass through untouched; only Message knows locales.
    if not isinstance(message, Message):
        return message
    if user_locale:
        message.locale = user_locale
    return unicode(message)
class LocaleHandler(logging.Handler):
    """Handler that can have a locale associated to translate Messages.
    A quick example of how to utilize the Message class above.
    LocaleHandler takes a locale and a target logging.Handler object
    to forward LogRecord objects to after translating the internal Message.
    """
    def __init__(self, locale, target):
        """Initialize a LocaleHandler
        :param locale: locale to use for translating messages
        :param target: logging.Handler object to forward
                       LogRecord objects to after translation
        """
        logging.Handler.__init__(self)
        self.locale = locale
        self.target = target
    def emit(self, record):
        # Only Message records carry translation state; everything else is
        # forwarded to the target handler unchanged.
        if isinstance(record.msg, Message):
            # set the locale and resolve to a string
            record.msg.locale = self.locale
        self.target.emit(record)

View File

@@ -0,0 +1,68 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Import related utilities and helper functions.
"""
import sys
import traceback
def import_class(import_str):
    """Return the class named by a fully qualified 'module.Class' string."""
    module_name, _unused, class_name = import_str.rpartition('.')
    try:
        __import__(module_name)
        module = sys.modules[module_name]
        return getattr(module, class_name)
    except (ValueError, AttributeError):
        # ValueError: empty module name; AttributeError: missing attribute.
        raise ImportError('Class %s cannot be found (%s)' %
                          (class_name,
                           traceback.format_exception(*sys.exc_info())))
def import_object(import_str, *args, **kwargs):
    """Import a class and return an instance of it."""
    cls = import_class(import_str)
    return cls(*args, **kwargs)
def import_object_ns(name_space, import_str, *args, **kwargs):
    """Tries to import object from default namespace.

    Imports a class and return an instance of it, first by trying
    to find the class in a default namespace, then failing back to
    a full path if not found in the default namespace.
    """
    qualified = "%s.%s" % (name_space, import_str)
    try:
        return import_class(qualified)(*args, **kwargs)
    except ImportError:
        # Not present under the default namespace; treat import_str as
        # an already fully qualified dotted path.
        return import_class(import_str)(*args, **kwargs)
def import_module(import_str):
    """Import the module named by *import_str* and return the module object.

    sys.modules is consulted after __import__ because __import__ returns
    the top-level package, not the leaf module.
    """
    __import__(import_str)
    return sys.modules[import_str]
def try_import(import_str, default=None):
    """Import a module by name, returning *default* if it cannot be imported."""
    try:
        module = import_module(import_str)
    except ImportError:
        return default
    return module

View File

@@ -0,0 +1,172 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
'''
JSON related utilities.
This module provides a few things:
1) A handy function for getting an object down to something that can be
JSON serialized. See to_primitive().
2) Wrappers around loads() and dumps(). The dumps() wrapper will
automatically use to_primitive() for you if needed.
3) This sets up anyjson to use the loads() and dumps() wrappers if anyjson
is available.
'''
import datetime
import functools
import inspect
import itertools
import json
import types
import xmlrpclib
import netaddr
import six
from pycadf.openstack.common import timeutils
# Predicates identifying objects that must never be serialized as-is
# (modules, classes, functions, tracebacks, frames, ...).
_nasty_type_tests = [inspect.ismodule, inspect.isclass, inspect.ismethod,
                     inspect.isfunction, inspect.isgeneratorfunction,
                     inspect.isgenerator, inspect.istraceback, inspect.isframe,
                     inspect.iscode, inspect.isbuiltin, inspect.isroutine,
                     inspect.isabstract]
# Types that are JSON-safe without conversion (Python 2 names: NoneType,
# basestring, long).
_simple_types = (types.NoneType, int, basestring, bool, float, long)
def to_primitive(value, convert_instances=False, convert_datetime=True,
                 level=0, max_depth=3):
    """Convert a complex object into primitives.
    Handy for JSON serialization. We can optionally handle instances,
    but since this is a recursive function, we could have cyclical
    data structures.
    To handle cyclical data structures we could track the actual objects
    visited in a set, but not all objects are hashable. Instead we just
    track the depth of the object inspections and don't go too deep.
    Therefore, convert_instances=True is lossy ... be aware.
    """
    # handle obvious types first - order of basic types determined by running
    # full tests on nova project, resulting in the following counts:
    # 572754 <type 'NoneType'>
    # 460353 <type 'int'>
    # 379632 <type 'unicode'>
    # 274610 <type 'str'>
    # 199918 <type 'dict'>
    # 114200 <type 'datetime.datetime'>
    #  51817 <type 'bool'>
    #  26164 <type 'list'>
    #   6491 <type 'float'>
    #    283 <type 'tuple'>
    #     19 <type 'long'>
    if isinstance(value, _simple_types):
        return value
    if isinstance(value, datetime.datetime):
        if convert_datetime:
            return timeutils.strtime(value)
        else:
            return value
    # value of itertools.count doesn't get caught by nasty_type_tests
    # and results in infinite loop when list(value) is called.
    if type(value) == itertools.count:
        return six.text_type(value)
    # FIXME(vish): Workaround for LP bug 852095. Without this workaround,
    #              tests that raise an exception in a mocked method that
    #              has a @wrap_exception with a notifier will fail. If
    #              we up the dependency to 0.5.4 (when it is released) we
    #              can remove this workaround.
    if getattr(value, '__module__', None) == 'mox':
        return 'mock'
    # Depth limit is the cycle guard: deeper structures collapse to '?'.
    if level > max_depth:
        return '?'
    # The try block may not be necessary after the class check above,
    # but just in case ...
    try:
        recursive = functools.partial(to_primitive,
                                      convert_instances=convert_instances,
                                      convert_datetime=convert_datetime,
                                      level=level,
                                      max_depth=max_depth)
        if isinstance(value, dict):
            return dict((k, recursive(v)) for k, v in value.iteritems())
        elif isinstance(value, (list, tuple)):
            return [recursive(lv) for lv in value]
        # It's not clear why xmlrpclib created their own DateTime type, but
        # for our purposes, make it a datetime type which is explicitly
        # handled
        if isinstance(value, xmlrpclib.DateTime):
            value = datetime.datetime(*tuple(value.timetuple())[:6])
        if convert_datetime and isinstance(value, datetime.datetime):
            return timeutils.strtime(value)
        elif hasattr(value, 'iteritems'):
            # Dict-like object: only this branch increments the depth.
            return recursive(dict(value.iteritems()), level=level + 1)
        elif hasattr(value, '__iter__'):
            return recursive(list(value))
        elif convert_instances and hasattr(value, '__dict__'):
            # Likely an instance of something. Watch for cycles.
            # Ignore class member vars.
            return recursive(value.__dict__, level=level + 1)
        elif isinstance(value, netaddr.IPAddress):
            return six.text_type(value)
        else:
            if any(test(value) for test in _nasty_type_tests):
                return six.text_type(value)
            return value
    except TypeError:
        # Class objects are tricky since they may define something like
        # __iter__ defined but it isn't callable as list().
        return six.text_type(value)
def dumps(value, default=to_primitive, **kwargs):
    """Serialize *value* to JSON, converting complex objects via *default*."""
    return json.dumps(value, default=default, **kwargs)
def loads(s):
    """Deserialize a JSON document from the string *s*."""
    return json.loads(s)
def load(s):
    """Deserialize a JSON document read from the file-like object *s*."""
    return json.load(s)
# When anyjson is installed, register this module as an implementation so
# that consumers of anyjson pick up the to_primitive-aware dumps() above.
try:
    import anyjson
except ImportError:
    pass
else:
    anyjson._modules.append((__name__, 'dumps', TypeError,
                             'loads', ValueError, 'load'))
    anyjson.force_implementation(__name__)

View File

@@ -0,0 +1,47 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Local storage of variables using weak references"""
import threading
import weakref
class WeakLocal(threading.local):
    """Thread-local storage that keeps only weak references to its values.

    Attribute reads dereference the stored weakref, so a stored value
    silently becomes None once nothing else holds a strong reference
    to it.
    """
    def __getattribute__(self, attr):
        ref = super(WeakLocal, self).__getattribute__(attr)
        if ref:
            # NOTE(mikal): what is stored is a weak reference, not the
            # value itself; dereference it to recover the inner value.
            ref = ref()
        return ref
    def __setattr__(self, attr, value):
        return super(WeakLocal, self).__setattr__(attr, weakref.ref(value))
# NOTE(mikal): the name "store" should be deprecated in the future
store = WeakLocal()
# A "weak" store uses weak references and allows an object to fall out of scope
# when it falls out of scope in the code that uses the thread local storage. A
# "strong" store will hold a reference to the object so that it never falls out
# of scope.
weak_store = WeakLocal()
# strong_store is a plain threading.local and keeps values alive.
strong_store = threading.local()

View File

@@ -0,0 +1,559 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack Foundation.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Openstack logging handler.
This module adds to logging functionality by adding the option to specify
a context object when calling the various log methods. If the context object
is not specified, default formatting is used. Additionally, an instance uuid
may be passed as part of the log message, which is intended to make it easier
for admins to find messages related to a specific instance.
It also allows setting of formatting information through conf.
"""
import inspect
import itertools
import logging
import logging.config
import logging.handlers
import os
import sys
import traceback
from oslo.config import cfg
from six import moves
from pycadf.openstack.common.gettextutils import _ # noqa
from pycadf.openstack.common import importutils
from pycadf.openstack.common import jsonutils
from pycadf.openstack.common import local
# strftime format used for %(asctime)s when no other date format is given.
_DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
# Command-line options shared by every service (--debug / --verbose).
common_cli_opts = [
    cfg.BoolOpt('debug',
                short='d',
                default=False,
                help='Print debugging output (set logging level to '
                     'DEBUG instead of default WARNING level).'),
    cfg.BoolOpt('verbose',
                short='v',
                default=False,
                help='Print more verbose output (set logging level to '
                     'INFO instead of default WARNING level).'),
]
# Command-line options controlling log destinations and formats.
logging_cli_opts = [
    cfg.StrOpt('log-config',
               metavar='PATH',
               help='If this option is specified, the logging configuration '
                    'file specified is used and overrides any other logging '
                    'options specified. Please see the Python logging module '
                    'documentation for details on logging configuration '
                    'files.'),
    cfg.StrOpt('log-format',
               default=None,
               metavar='FORMAT',
               help='DEPRECATED. '
                    'A logging.Formatter log message format string which may '
                    'use any of the available logging.LogRecord attributes. '
                    'This option is deprecated. Please use '
                    'logging_context_format_string and '
                    'logging_default_format_string instead.'),
    cfg.StrOpt('log-date-format',
               default=_DEFAULT_LOG_DATE_FORMAT,
               metavar='DATE_FORMAT',
               help='Format string for %%(asctime)s in log records. '
                    'Default: %(default)s'),
    cfg.StrOpt('log-file',
               metavar='PATH',
               deprecated_name='logfile',
               help='(Optional) Name of log file to output to. '
                    'If no default is set, logging will go to stdout.'),
    cfg.StrOpt('log-dir',
               deprecated_name='logdir',
               help='(Optional) The base directory used for relative '
                    '--log-file paths'),
    cfg.BoolOpt('use-syslog',
                default=False,
                help='Use syslog for logging.'),
    cfg.StrOpt('syslog-log-facility',
               default='LOG_USER',
               help='syslog facility to receive log lines')
]
# Config-file-only option controlling stderr output.
generic_log_opts = [
    cfg.BoolOpt('use_stderr',
                default=True,
                help='Log output to standard error')
]
# Config-file options for message formats, per-logger levels and
# instance-context prefixes.
log_opts = [
    cfg.StrOpt('logging_context_format_string',
               default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
                       '%(name)s [%(request_id)s %(user)s %(tenant)s] '
                       '%(instance)s%(message)s',
               help='format string to use for log messages with context'),
    cfg.StrOpt('logging_default_format_string',
               default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
                       '%(name)s [-] %(instance)s%(message)s',
               help='format string to use for log messages without context'),
    cfg.StrOpt('logging_debug_format_suffix',
               default='%(funcName)s %(pathname)s:%(lineno)d',
               help='data to append to log format when level is DEBUG'),
    cfg.StrOpt('logging_exception_prefix',
               default='%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s '
                       '%(instance)s',
               help='prefix each line of exception output with this format'),
    cfg.ListOpt('default_log_levels',
                default=[
                    'amqplib=WARN',
                    'sqlalchemy=WARN',
                    'boto=WARN',
                    'suds=INFO',
                    'keystone=INFO',
                    'eventlet.wsgi.server=WARN'
                ],
                help='list of logger=LEVEL pairs'),
    cfg.BoolOpt('publish_errors',
                default=False,
                help='publish error events'),
    cfg.BoolOpt('fatal_deprecations',
                default=False,
                help='make deprecations fatal'),
    # NOTE(mikal): there are two options here because sometimes we are handed
    # a full instance (and could include more information), and other times we
    # are just handed a UUID for the instance.
    cfg.StrOpt('instance_format',
               default='[instance: %(uuid)s] ',
               help='If an instance is passed with the log message, format '
                    'it like this'),
    cfg.StrOpt('instance_uuid_format',
               default='[instance: %(uuid)s] ',
               help='If an instance UUID is passed with the log message, '
                    'format it like this'),
]
# Register all option groups on the global config object at import time.
CONF = cfg.CONF
CONF.register_cli_opts(common_cli_opts)
CONF.register_cli_opts(logging_cli_opts)
CONF.register_opts(generic_log_opts)
CONF.register_opts(log_opts)
# our new audit level
# NOTE(jkoelker) Since we synthesized an audit level, make the logging
# module aware of it so it acts like other levels.
logging.AUDIT = logging.INFO + 1
logging.addLevelName(logging.AUDIT, 'AUDIT')
# Fallback implementation for Python < 2.7, which lacks logging.NullHandler.
try:
    NullHandler = logging.NullHandler
except AttributeError: # NOTE(jkoelker) NullHandler added in Python 2.7
    class NullHandler(logging.Handler):
        # Discards every record and never acquires a lock.
        def handle(self, record):
            pass
        def emit(self, record):
            pass
        def createLock(self):
            self.lock = None
def _dictify_context(context):
if context is None:
return None
if not isinstance(context, dict) and getattr(context, 'to_dict', None):
context = context.to_dict()
return context
def _get_binary_name():
return os.path.basename(inspect.stack()[-1][1])
def _get_log_file_path(binary=None):
    """Resolve the log file path from CONF.log_file / CONF.log_dir.

    Returns None when neither option is set; otherwise combines them,
    defaulting the file name to '<binary>.log' when only a directory
    is configured.
    """
    logfile = CONF.log_file
    logdir = CONF.log_dir
    if logfile:
        return os.path.join(logdir, logfile) if logdir else logfile
    if logdir:
        binary = binary or _get_binary_name()
        return '%s.log' % (os.path.join(logdir, binary),)
class BaseLoggerAdapter(logging.LoggerAdapter):
    """LoggerAdapter that adds an audit() convenience method."""
    def audit(self, msg, *args, **kwargs):
        """Log *msg* at the synthesized AUDIT level (INFO + 1)."""
        self.log(logging.AUDIT, msg, *args, **kwargs)
class LazyAdapter(BaseLoggerAdapter):
    """Adapter that defers creating the real logger until first use."""
    def __init__(self, name='unknown', version='unknown'):
        # The underlying ContextAdapter is built lazily by the property.
        self._logger = None
        self.extra = {}
        self.name = name
        self.version = version
    @property
    def logger(self):
        if not self._logger:
            self._logger = getLogger(self.name, self.version)
        return self._logger
class ContextAdapter(BaseLoggerAdapter):
    """Adapter that injects request-context fields into log records."""
    # Alias so callers may use warn() as well as warning().
    warn = logging.LoggerAdapter.warning
    def __init__(self, logger, project_name, version_string):
        self.logger = logger
        self.project = project_name
        self.version = version_string
    @property
    def handlers(self):
        return self.logger.handlers
    def deprecated(self, msg, *args, **kwargs):
        """Log a deprecation warning, or raise if fatal_deprecations is set.

        Raises DeprecatedConfig (declared elsewhere in this module) when
        CONF.fatal_deprecations is enabled.
        """
        stdmsg = _("Deprecated: %s") % msg
        if CONF.fatal_deprecations:
            self.critical(stdmsg, *args, **kwargs)
            raise DeprecatedConfig(msg=stdmsg)
        else:
            self.warn(stdmsg, *args, **kwargs)
    def process(self, msg, kwargs):
        """Merge context/instance information into the record's 'extra'."""
        if 'extra' not in kwargs:
            kwargs['extra'] = {}
        extra = kwargs['extra']
        context = kwargs.pop('context', None)
        if not context:
            # Fall back to the thread-local store when no explicit context.
            context = getattr(local.store, 'context', None)
        if context:
            extra.update(_dictify_context(context))
        instance = kwargs.pop('instance', None)
        instance_extra = ''
        if instance:
            instance_extra = CONF.instance_format % instance
        else:
            instance_uuid = kwargs.pop('instance_uuid', None)
            if instance_uuid:
                instance_extra = (CONF.instance_uuid_format
                                  % {'uuid': instance_uuid})
        extra.update({'instance': instance_extra})
        extra.update({"project": self.project})
        extra.update({"version": self.version})
        # Keep a copy of everything under 'extra' for JSONFormatter.
        extra['extra'] = extra.copy()
        return msg, kwargs
class JSONFormatter(logging.Formatter):
    """Formatter that renders each LogRecord as a JSON document."""
    def __init__(self, fmt=None, datefmt=None):
        # NOTE(jkoelker) we ignore the fmt argument, but its still there
        # since logging.config.fileConfig passes it.
        self.datefmt = datefmt
    def formatException(self, ei, strip_newlines=True):
        """Format exc_info as a list of lines (Python 2 itertools API)."""
        lines = traceback.format_exception(*ei)
        if strip_newlines:
            lines = [itertools.ifilter(
                lambda x: x,
                line.rstrip().splitlines()) for line in lines]
            lines = list(itertools.chain(*lines))
        return lines
    def format(self, record):
        """Serialize every interesting LogRecord attribute to JSON."""
        message = {'message': record.getMessage(),
                   'asctime': self.formatTime(record, self.datefmt),
                   'name': record.name,
                   'msg': record.msg,
                   'args': record.args,
                   'levelname': record.levelname,
                   'levelno': record.levelno,
                   'pathname': record.pathname,
                   'filename': record.filename,
                   'module': record.module,
                   'lineno': record.lineno,
                   'funcname': record.funcName,
                   'created': record.created,
                   'msecs': record.msecs,
                   'relative_created': record.relativeCreated,
                   'thread': record.thread,
                   'thread_name': record.threadName,
                   'process_name': record.processName,
                   'process': record.process,
                   'traceback': None}
        # 'extra' is attached by ContextAdapter.process() when present.
        if hasattr(record, 'extra'):
            message['extra'] = record.extra
        if record.exc_info:
            message['traceback'] = self.formatException(record.exc_info)
        return jsonutils.dumps(message)
def _create_logging_excepthook(product_name):
    """Build a sys.excepthook that logs uncaught exceptions as CRITICAL."""
    def logging_excepthook(exc_type, exc_value, exc_tb):
        extra = {}
        if CONF.verbose:
            # Attach full exc_info only in verbose mode.
            extra['exc_info'] = (exc_type, exc_value, exc_tb)
        getLogger(product_name).critical(str(exc_value), **extra)
    return logging_excepthook
class LogConfigError(Exception):
    """Raised when a logging configuration file cannot be loaded."""
    message = _('Error loading logging config %(log_config)s: %(err_msg)s')
    def __init__(self, log_config, err_msg):
        self.log_config = log_config
        self.err_msg = err_msg
    def __str__(self):
        return self.message % {'log_config': self.log_config,
                               'err_msg': self.err_msg}
def _load_log_config(log_config):
    """Load a logging config file, wrapping parse errors in LogConfigError."""
    try:
        logging.config.fileConfig(log_config)
    except moves.configparser.Error as exc:
        raise LogConfigError(log_config, str(exc))
def setup(product_name):
    """Initialize logging for *product_name* from configuration."""
    log_config = CONF.log_config
    if log_config:
        _load_log_config(log_config)
    else:
        _setup_logging_from_conf()
    # Route uncaught exceptions through the product logger.
    sys.excepthook = _create_logging_excepthook(product_name)
def set_defaults(logging_context_format_string):
    """Override the default value of logging_context_format_string."""
    cfg.set_defaults(
        log_opts,
        logging_context_format_string=logging_context_format_string)
def _find_facility_from_conf():
    """Map CONF.syslog_log_facility to a SysLogHandler facility value.

    Accepts either a SysLogHandler LOG_* constant name or a key of
    SysLogHandler.facility_names (e.g. 'user', 'local0').  Raises
    TypeError listing the valid choices otherwise.
    """
    facility_names = logging.handlers.SysLogHandler.facility_names
    facility = getattr(logging.handlers.SysLogHandler,
                       CONF.syslog_log_facility,
                       None)
    if facility is None and CONF.syslog_log_facility in facility_names:
        facility = facility_names.get(CONF.syslog_log_facility)
    if facility is None:
        # NOTE: list() is needed because dict.keys() is a view on
        # Python 3 and has no extend(); also removed a duplicated
        # 'LOG_AUTH' entry from the listing below.
        valid_facilities = list(facility_names.keys())
        consts = ['LOG_AUTH', 'LOG_AUTHPRIV', 'LOG_CRON', 'LOG_DAEMON',
                  'LOG_FTP', 'LOG_KERN', 'LOG_LPR', 'LOG_MAIL', 'LOG_NEWS',
                  'LOG_SYSLOG', 'LOG_USER', 'LOG_UUCP',
                  'LOG_LOCAL0', 'LOG_LOCAL1', 'LOG_LOCAL2', 'LOG_LOCAL3',
                  'LOG_LOCAL4', 'LOG_LOCAL5', 'LOG_LOCAL6', 'LOG_LOCAL7']
        valid_facilities.extend(consts)
        raise TypeError(_('syslog facility must be one of: %s') %
                        ', '.join("'%s'" % fac
                                  for fac in valid_facilities))
    return facility
def _setup_logging_from_conf():
    """Wire root-logger handlers, formatters and levels from CONF options.

    Re-entrant: existing handlers are removed first so repeated setup()
    calls do not duplicate output.
    """
    log_root = getLogger(None).logger
    # Drop any handlers from a previous setup before re-adding.
    for handler in log_root.handlers:
        log_root.removeHandler(handler)
    if CONF.use_syslog:
        facility = _find_facility_from_conf()
        syslog = logging.handlers.SysLogHandler(address='/dev/log',
                                                facility=facility)
        log_root.addHandler(syslog)
    logpath = _get_log_file_path()
    if logpath:
        filelog = logging.handlers.WatchedFileHandler(logpath)
        log_root.addHandler(filelog)
    if CONF.use_stderr:
        streamlog = ColorHandler()
        log_root.addHandler(streamlog)
    elif not CONF.log_file:
        # pass sys.stdout as a positional argument
        # python2.6 calls the argument strm, in 2.7 it's stream
        streamlog = logging.StreamHandler(sys.stdout)
        log_root.addHandler(streamlog)
    if CONF.publish_errors:
        # Imported lazily by dotted path to avoid a hard dependency on the
        # notifier machinery when publish_errors is off.
        handler = importutils.import_object(
            "pycadf.openstack.common.log_handler.PublishErrorsHandler",
            logging.ERROR)
        log_root.addHandler(handler)

    datefmt = CONF.log_date_format
    for handler in log_root.handlers:
        # NOTE(alaski): CONF.log_format overrides everything currently. This
        # should be deprecated in favor of context aware formatting.
        if CONF.log_format:
            handler.setFormatter(logging.Formatter(fmt=CONF.log_format,
                                                   datefmt=datefmt))
            log_root.info('Deprecated: log_format is now deprecated and will '
                          'be removed in the next release')
        else:
            handler.setFormatter(ContextFormatter(datefmt=datefmt))

    # Root level: debug wins over verbose; otherwise default to WARNING.
    if CONF.debug:
        log_root.setLevel(logging.DEBUG)
    elif CONF.verbose:
        log_root.setLevel(logging.INFO)
    else:
        log_root.setLevel(logging.WARNING)

    # Per-module overrides, given as "module=LEVEL" pairs.
    for pair in CONF.default_log_levels:
        mod, _sep, level_name = pair.partition('=')
        level = logging.getLevelName(level_name)
        logger = logging.getLogger(mod)
        logger.setLevel(level)
# Cache of ContextAdapter instances keyed by logger name.
_loggers = {}


def getLogger(name='unknown', version='unknown'):
    """Return the cached ContextAdapter for *name*, creating it on first use."""
    try:
        return _loggers[name]
    except KeyError:
        adapter = ContextAdapter(logging.getLogger(name), name, version)
        _loggers[name] = adapter
        return adapter
def getLazyLogger(name='unknown', version='unknown'):
    """Return a lazy pass-through logger.

    The returned adapter defers creating the real logger until it is first
    used, then forwards every call to it.
    """
    return LazyAdapter(name, version)
class WritableLogger(object):
    """File-like shim: every `write` call is forwarded to a logger.

    Useful wherever an API expects a writable stream (e.g. WSGI servers)
    but output should go through logging instead.
    """

    def __init__(self, logger, level=logging.INFO):
        self.logger = logger
        self.level = level

    def write(self, msg):
        # Emit the message verbatim at the configured level.
        self.logger.log(self.level, msg)
class ContextFormatter(logging.Formatter):
    """A context.RequestContext aware formatter configured through flags.

    The flags used to set format strings are: logging_context_format_string
    and logging_default_format_string. You can also specify
    logging_debug_format_suffix to append extra formatting if the log level is
    debug.

    For information about what variables are available for the formatter see:
    http://docs.python.org/library/logging.html#formatter
    """

    def format(self, record):
        """Uses contextstring if request_id is set, otherwise default."""
        # NOTE(sdague): default the fancier formating params
        # to an empty string so we don't throw an exception if
        # they get used
        for key in ('instance', 'color'):
            if key not in record.__dict__:
                record.__dict__[key] = ''

        # Pick the format string per record: context-aware when the record
        # carries a request_id, plain default otherwise.
        if record.__dict__.get('request_id', None):
            self._fmt = CONF.logging_context_format_string
        else:
            self._fmt = CONF.logging_default_format_string

        if (record.levelno == logging.DEBUG and
                CONF.logging_debug_format_suffix):
            self._fmt += " " + CONF.logging_debug_format_suffix

        # Cache this on the record, Logger will respect our formated copy
        if record.exc_info:
            record.exc_text = self.formatException(record.exc_info, record)
        return logging.Formatter.format(self, record)

    def formatException(self, exc_info, record=None):
        """Format exception output with CONF.logging_exception_prefix."""
        # Without a record there is nothing to prefix with; defer to the
        # stock formatter.
        if not record:
            return logging.Formatter.formatException(self, exc_info)

        stringbuffer = moves.StringIO()
        traceback.print_exception(exc_info[0], exc_info[1], exc_info[2],
                                  None, stringbuffer)
        lines = stringbuffer.getvalue().split('\n')
        stringbuffer.close()

        # The prefix may reference %(asctime)s; materialise it on the
        # record before interpolating.
        if CONF.logging_exception_prefix.find('%(asctime)') != -1:
            record.asctime = self.formatTime(record, self.datefmt)

        formatted_lines = []
        for line in lines:
            pl = CONF.logging_exception_prefix % record.__dict__
            fl = '%s%s' % (pl, line)
            formatted_lines.append(fl)
        return '\n'.join(formatted_lines)
class ColorHandler(logging.StreamHandler):
    """StreamHandler that exposes an ANSI colour code per log level.

    `format` stores the escape sequence on the record as ``record.color``
    so format strings can reference ``%(color)s``.
    """

    # NOTE(review): logging.AUDIT is not a stdlib level -- presumably it is
    # registered elsewhere in this module; confirm before reusing this
    # handler standalone.
    LEVEL_COLORS = {
        logging.DEBUG: '\033[00;32m',  # GREEN
        logging.INFO: '\033[00;36m',  # CYAN
        logging.AUDIT: '\033[01;36m',  # BOLD CYAN
        logging.WARN: '\033[01;33m',  # BOLD YELLOW
        logging.ERROR: '\033[01;31m',  # BOLD RED
        logging.CRITICAL: '\033[01;31m',  # BOLD RED
    }

    def format(self, record):
        record.color = self.LEVEL_COLORS[record.levelno]
        return logging.StreamHandler.format(self, record)
class DeprecatedConfig(Exception):
    """Raised when a fatally-deprecated config option is used."""

    message = _("Fatal call to deprecated config: %(msg)s")

    def __init__(self, msg):
        # Fixed: super() must name this class, not Exception -- naming the
        # parent skips a level of the MRO and breaks if a base is inserted.
        super(DeprecatedConfig, self).__init__(self.message % dict(msg=msg))

View File

@@ -0,0 +1,188 @@
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Time related utilities and helper functions.
"""
import calendar
import datetime
import iso8601
import six
# ISO 8601 extended time format with microseconds
_ISO8601_TIME_FORMAT_SUBSECOND = '%Y-%m-%dT%H:%M:%S.%f'
_ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S'
PERFECT_TIME_FORMAT = _ISO8601_TIME_FORMAT_SUBSECOND


def isotime(at=None, subsecond=False):
    """Stringify time in ISO 8601 format."""
    if not at:
        at = utcnow()
    fmt = _ISO8601_TIME_FORMAT_SUBSECOND if subsecond else _ISO8601_TIME_FORMAT
    stamp = at.strftime(fmt)
    # Naive datetimes are treated as UTC; aware ones keep their tz name.
    tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC'
    return stamp + ('Z' if tz == 'UTC' else tz)
def parse_isotime(timestr):
    """Parse time from ISO 8601 format.

    :raises ValueError: when *timestr* cannot be parsed (wraps both
        iso8601.ParseError and TypeError).
    """
    try:
        return iso8601.parse_date(timestr)
    except (iso8601.ParseError, TypeError) as e:
        # `unicode` only exists on Python 2; six.text_type (the file already
        # imports six) works on both interpreter lines.
        raise ValueError(six.text_type(e))
def strtime(at=None, fmt=PERFECT_TIME_FORMAT):
    """Format *at* (current UTC time when omitted) with *fmt*."""
    return (at or utcnow()).strftime(fmt)
def parse_strtime(timestr, fmt=PERFECT_TIME_FORMAT):
    """Turn a formatted time back into a datetime.

    Inverse of strtime(); raises ValueError if *timestr* does not match *fmt*.
    """
    return datetime.datetime.strptime(timestr, fmt)
def normalize_time(timestamp):
    """Normalize time in arbitrary timezone to UTC naive object."""
    offset = timestamp.utcoffset()
    if offset is None:
        # Already naive -- assumed UTC; nothing to shift.
        return timestamp
    naive = timestamp.replace(tzinfo=None)
    return naive - offset
def is_older_than(before, seconds):
    """Return True if before is older than seconds."""
    if isinstance(before, six.string_types):
        # Accept a PERFECT_TIME_FORMAT string as well as a datetime.
        before = parse_strtime(before).replace(tzinfo=None)
    age = utcnow() - before
    return age > datetime.timedelta(seconds=seconds)
def is_newer_than(after, seconds):
    """Return True if after is newer than seconds."""
    if isinstance(after, six.string_types):
        # Accept a PERFECT_TIME_FORMAT string as well as a datetime.
        after = parse_strtime(after).replace(tzinfo=None)
    lead = after - utcnow()
    return lead > datetime.timedelta(seconds=seconds)
def utcnow_ts():
    """Timestamp (seconds since the epoch) version of our utcnow function."""
    now = utcnow()
    return calendar.timegm(now.timetuple())
def utcnow():
    """Overridable version of utils.utcnow.

    Honors utcnow.override_time, which may be a single datetime (returned
    every call) or a list of datetimes (consumed one per call).
    """
    override = utcnow.override_time
    if override:
        try:
            # A list: hand out entries one at a time.
            return override.pop(0)
        except AttributeError:
            # A single datetime has no pop(); return it as-is.
            return override
    return datetime.datetime.utcnow()
def iso8601_from_timestamp(timestamp):
    """Return an ISO 8601 formatted date built from a POSIX timestamp."""
    dt = datetime.datetime.utcfromtimestamp(timestamp)
    return isotime(dt)


# No override active by default; see set_time_override()/clear_time_override().
utcnow.override_time = None
def set_time_override(override_time=None):
    """Overrides utils.utcnow.

    Make it return a constant time or a list thereof, one at a time.

    :param override_time: datetime instance or list thereof; defaults to
        the current UTC time, captured when this function is called.
    """
    # Fixed: the old default argument called datetime.datetime.utcnow() in
    # the signature, which is evaluated once at import time -- every
    # default call would freeze the clock at module-load time.
    if override_time is None:
        override_time = utcnow()
    utcnow.override_time = override_time
def advance_time_delta(timedelta):
    """Advance overridden time using a datetime.timedelta."""
    assert utcnow.override_time is not None
    try:
        # A list of datetimes: rebuild it with every entry advanced. The
        # previous loop (`for dt in ...: dt += timedelta`) only rebound the
        # loop variable, leaving the list contents unchanged.
        utcnow.override_time = [dt + timedelta
                                for dt in utcnow.override_time]
    except TypeError:
        # A single (non-iterable) datetime.
        utcnow.override_time += timedelta
def advance_time_seconds(seconds):
    """Advance overridden time by seconds."""
    advance_time_delta(datetime.timedelta(seconds=seconds))
def clear_time_override():
    """Remove the overridden time.

    Counterpart of set_time_override(); utcnow() reverts to the real clock.
    """
    utcnow.override_time = None
def marshall_now(now=None):
    """Make an rpc-safe datetime with microseconds.

    Note: tzinfo is stripped, but not required for relative times.
    """
    if not now:
        now = utcnow()
    return {'day': now.day, 'month': now.month, 'year': now.year,
            'hour': now.hour, 'minute': now.minute, 'second': now.second,
            'microsecond': now.microsecond}
def unmarshall_time(tyme):
    """Unmarshall a datetime dict produced by marshall_now()."""
    return datetime.datetime(tyme['year'], tyme['month'], tyme['day'],
                             tyme['hour'], tyme['minute'], tyme['second'],
                             tyme['microsecond'])
def delta_seconds(before, after):
    """Return the difference between two timing objects.

    Compute the difference in seconds between two date, time, or
    datetime objects (as a float, to microsecond resolution).
    """
    delta = after - before
    if hasattr(delta, 'total_seconds'):
        return delta.total_seconds()
    # Python < 2.7: timedelta has no total_seconds(); compute by hand.
    return ((delta.days * 24 * 3600) + delta.seconds +
            float(delta.microseconds) / (10 ** 6))
def is_soon(dt, window):
    """Determines if time is going to happen in the next window seconds.

    :params dt: the time
    :params window: minimum seconds to remain to consider the time not soon

    :return: True if expiration is within the given duration
    """
    deadline = utcnow() + datetime.timedelta(seconds=window)
    return normalize_time(dt) <= deadline

38
pycadf/path.py Normal file
View File

@@ -0,0 +1,38 @@
# -*- encoding: utf-8 -*-
#
# Copyright 2013 IBM Corp.
#
# Author: Matt Rutkowski <mrutkows@us.ibm.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pycadf import cadftype
class Path(cadftype.CADFAbstractType):
    """CADF Path type; only string validation is implemented so far."""

    def set_path_absolute(self):
        # TODO(mrutkows): validate absolute path format, else Type error
        raise NotImplementedError()

    def set_path_relative(self):
        # TODO(mrutkows): validate relative path format, else Type error
        raise NotImplementedError()

    # TODO(mrutkows): validate any cadf:Path (type) record against CADF schema
    @staticmethod
    def is_valid(value):
        """Return True when *value* is a string; raise TypeError otherwise."""
        if isinstance(value, str):
            return True
        raise TypeError

70
pycadf/reason.py Normal file
View File

@@ -0,0 +1,70 @@
# -*- encoding: utf-8 -*-
#
# Copyright 2013 IBM Corp.
#
# Author: Matt Rutkowski <mrutkows@us.ibm.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pycadf import cadftype
TYPE_URI_REASON = cadftype.CADF_VERSION_1_0_0 + 'reason'

REASON_KEYNAME_REASONTYPE = "reasonType"
REASON_KEYNAME_REASONCODE = "reasonCode"
REASON_KEYNAME_POLICYTYPE = "policyType"
REASON_KEYNAME_POLICYID = "policyId"

REASON_KEYNAMES = [REASON_KEYNAME_REASONTYPE,
                   REASON_KEYNAME_REASONCODE,
                   REASON_KEYNAME_POLICYTYPE,
                   REASON_KEYNAME_POLICYID]


class Reason(cadftype.CADFAbstractType):
    """CADF Reason type: the why behind an event's outcome."""

    reasonType = cadftype.ValidatorDescriptor(REASON_KEYNAME_REASONTYPE,
                                              lambda x: isinstance(x, str))
    reasonCode = cadftype.ValidatorDescriptor(REASON_KEYNAME_REASONCODE,
                                              lambda x: isinstance(x, str))
    policyType = cadftype.ValidatorDescriptor(REASON_KEYNAME_POLICYTYPE,
                                              lambda x: isinstance(x, str))
    policyId = cadftype.ValidatorDescriptor(REASON_KEYNAME_POLICYID,
                                            lambda x: isinstance(x, str))

    def __init__(self, reasonType=None, reasonCode=None, policyType=None,
                 policyId=None):
        """Assign whichever of the four attributes were supplied."""
        values = ((REASON_KEYNAME_REASONTYPE, reasonType),
                  (REASON_KEYNAME_REASONCODE, reasonCode),
                  (REASON_KEYNAME_POLICYTYPE, policyType),
                  (REASON_KEYNAME_POLICYID, policyId))
        for keyname, value in values:
            if value is not None:
                setattr(self, keyname, value)

    # TODO(mrutkows): validate this cadf:Reason type against schema
    def is_valid(self):
        """MUST have at least one valid pairing of reason+code or policy+id."""
        reason_pair = (hasattr(self, REASON_KEYNAME_REASONTYPE) and
                       hasattr(self, REASON_KEYNAME_REASONCODE))
        policy_pair = (hasattr(self, REASON_KEYNAME_POLICYTYPE) and
                       hasattr(self, REASON_KEYNAME_POLICYID))
        return reason_pair or policy_pair

76
pycadf/reporterstep.py Normal file
View File

@@ -0,0 +1,76 @@
# -*- encoding: utf-8 -*-
#
# Copyright 2013 IBM Corp.
#
# Author: Matt Rutkowski <mrutkows@us.ibm.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pycadf import cadftype
from pycadf import identifier
from pycadf import resource
from pycadf import timestamp
# Dictionary keynames used when serializing a Reporterstep.
REPORTERSTEP_KEYNAME_ROLE = "role"
REPORTERSTEP_KEYNAME_REPORTER = "reporter"
REPORTERSTEP_KEYNAME_REPORTERID = "reporterId"
REPORTERSTEP_KEYNAME_REPORTERTIME = "reporterTime"
#REPORTERSTEP_KEYNAME_ATTACHMENTS = "attachments"

REPORTERSTEP_KEYNAMES = [REPORTERSTEP_KEYNAME_ROLE,
                         REPORTERSTEP_KEYNAME_REPORTER,
                         REPORTERSTEP_KEYNAME_REPORTERID,
                         REPORTERSTEP_KEYNAME_REPORTERTIME,
                         #REPORTERSTEP_KEYNAME_ATTACHMENTS
                         ]


class Reporterstep(cadftype.CADFAbstractType):
    """CADF Reporterstep: one hop in an event's reporter chain.

    Either an inline reporter Resource (or the literal strings 'initiator'
    / 'target') or a reporterId reference must be present for validity.
    """

    role = cadftype.ValidatorDescriptor(
        REPORTERSTEP_KEYNAME_ROLE,
        lambda x: cadftype.is_valid_reporter_role(x))
    reporter = cadftype.ValidatorDescriptor(
        REPORTERSTEP_KEYNAME_REPORTER,
        (lambda x: isinstance(x, resource.Resource) or
         (isinstance(x, str) and
          (x == 'initiator' or x == 'target'))))
    reporterId = cadftype.ValidatorDescriptor(
        REPORTERSTEP_KEYNAME_REPORTERID, lambda x: identifier.is_valid(x))
    reporterTime = cadftype.ValidatorDescriptor(
        REPORTERSTEP_KEYNAME_REPORTERTIME, lambda x: timestamp.is_valid(x))

    def __init__(self, role=cadftype.REPORTER_ROLE_OBSERVER,
                 reporterTime=None, reporter=None, reporterId=None):
        """Build a step; only `role` is always set (defaults to observer)."""
        # Reporterstep.role
        setattr(self, REPORTERSTEP_KEYNAME_ROLE, role)

        # Reporterstep.reporterTime
        if reporterTime is not None:
            setattr(self, REPORTERSTEP_KEYNAME_REPORTERTIME, reporterTime)

        # Reporterstep.reporter
        if reporter is not None:
            setattr(self, REPORTERSTEP_KEYNAME_REPORTER, reporter)

        # Reporterstep.reporterId
        if reporterId is not None:
            setattr(self, REPORTERSTEP_KEYNAME_REPORTERID, reporterId)

    # self validate this cadf:Reporterstep type against schema
    def is_valid(self):
        return (
            hasattr(self, REPORTERSTEP_KEYNAME_ROLE) and
            (hasattr(self, REPORTERSTEP_KEYNAME_REPORTER) or
             hasattr(self, REPORTERSTEP_KEYNAME_REPORTERID))
        )

121
pycadf/resource.py Normal file
View File

@@ -0,0 +1,121 @@
# -*- encoding: utf-8 -*-
#
# Copyright 2013 IBM Corp.
#
# Author: Matt Rutkowski <mrutkows@us.ibm.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pycadf import attachment
from pycadf import cadftaxonomy
from pycadf import cadftype
from pycadf import geolocation
from pycadf import identifier
# typeURI identifying a CADF Resource record.
TYPE_URI_RESOURCE = cadftype.CADF_VERSION_1_0_0 + 'resource'

# Dictionary keynames used when serializing a Resource.
RESOURCE_KEYNAME_TYPEURI = "typeURI"
RESOURCE_KEYNAME_ID = "id"
RESOURCE_KEYNAME_NAME = "name"
RESOURCE_KEYNAME_DOMAIN = "domain"
RESOURCE_KEYNAME_REF = "ref"
RESOURCE_KEYNAME_GEO = "geolocation"
RESOURCE_KEYNAME_GEOID = "geolocationId"
RESOURCE_KEYNAME_ATTACHMENTS = "attachments"

RESOURCE_KEYNAMES = [RESOURCE_KEYNAME_TYPEURI,
                     RESOURCE_KEYNAME_ID,
                     RESOURCE_KEYNAME_NAME,
                     RESOURCE_KEYNAME_DOMAIN,
                     RESOURCE_KEYNAME_REF,
                     RESOURCE_KEYNAME_GEO,
                     RESOURCE_KEYNAME_GEOID,
                     RESOURCE_KEYNAME_ATTACHMENTS]
class Resource(cadftype.CADFAbstractType):
    """CADF Resource type: an initiator, target or reporter of an event."""

    typeURI = cadftype.ValidatorDescriptor(
        RESOURCE_KEYNAME_TYPEURI, lambda x: cadftaxonomy.is_valid_resource(x))
    id = cadftype.ValidatorDescriptor(RESOURCE_KEYNAME_ID,
                                      lambda x: identifier.is_valid(x))
    name = cadftype.ValidatorDescriptor(RESOURCE_KEYNAME_NAME,
                                        lambda x: isinstance(x, str))
    domain = cadftype.ValidatorDescriptor(RESOURCE_KEYNAME_DOMAIN,
                                          lambda x: isinstance(x, str))
    # TODO(mrutkows): validate the "ref" attribute is indeed a URI (format),
    # If it is a URL, we do not need to validate it is accessible/working,
    # for audit purposes this could have been a valid URL at some point
    # in the past or a URL that is only valid within some domain (e.g. a
    # private cloud)
    ref = cadftype.ValidatorDescriptor(RESOURCE_KEYNAME_REF,
                                       lambda x: isinstance(x, str))
    geolocation = cadftype.ValidatorDescriptor(
        RESOURCE_KEYNAME_GEO,
        lambda x: isinstance(x, geolocation.Geolocation))
    geolocationId = cadftype.ValidatorDescriptor(
        RESOURCE_KEYNAME_GEOID, lambda x: identifier.is_valid(x))

    def __init__(self, id=None,
                 typeURI=cadftaxonomy.UNKNOWN, name=None, ref=None,
                 domain=None, geolocation=None, geolocationId=None):
        """Create a Resource.

        :param id: unique identifier; a fresh uuid is generated when omitted.
        :param typeURI: taxonomy type of the resource (default UNKNOWN).
        """
        # Fixed: the previous default `id=identifier.generate_uuid()` was
        # evaluated once at import time, so every Resource created without
        # an explicit id shared the same uuid.
        if id is None:
            id = identifier.generate_uuid()

        # Resource.id
        setattr(self, RESOURCE_KEYNAME_ID, id)

        # Resource.typeURI
        setattr(self, RESOURCE_KEYNAME_TYPEURI, typeURI)

        # Resource.name
        if name is not None:
            setattr(self, RESOURCE_KEYNAME_NAME, name)

        # Resource.ref
        if ref is not None:
            setattr(self, RESOURCE_KEYNAME_REF, ref)

        # Resource.domain
        if domain is not None:
            setattr(self, RESOURCE_KEYNAME_DOMAIN, domain)

        # Resource.geolocation
        if geolocation is not None:
            setattr(self, RESOURCE_KEYNAME_GEO, geolocation)

        # Resource.geolocationId
        if geolocationId:
            setattr(self, RESOURCE_KEYNAME_GEOID, geolocationId)

    # Resource.attachments
    def add_attachment(self, attach_val):
        """Append a valid Attachment, creating the list on first use.

        :raises ValueError: when the value is not a valid Attachment.
        """
        if (attach_val is not None
                and isinstance(attach_val, attachment.Attachment)):
            if attach_val.is_valid():
                # Create the list of Attachments if needed
                if not hasattr(self, RESOURCE_KEYNAME_ATTACHMENTS):
                    setattr(self, RESOURCE_KEYNAME_ATTACHMENTS, list())
                attachments = getattr(self, RESOURCE_KEYNAME_ATTACHMENTS)
                attachments.append(attach_val)
            else:
                raise ValueError('Invalid attachment')
        else:
            raise ValueError('Invalid attachment. Value must be an Attachment')

    # self validate this cadf:Resource type against schema
    def is_valid(self):
        return (
            hasattr(self, RESOURCE_KEYNAME_TYPEURI) and
            hasattr(self, RESOURCE_KEYNAME_ID)
        )
        # TODO(mrutkows): validate the Resource's attribute types

34
pycadf/tag.py Normal file
View File

@@ -0,0 +1,34 @@
# -*- encoding: utf-8 -*-
#
# Copyright 2013 IBM Corp.
#
# Author: Matt Rutkowski <mrutkows@us.ibm.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def generate_name_value_tag(name, value):
    """Concatenate *name* and *value* into a CADF `name?value=` tag.

    :raises ValueError: when either argument is None.
    """
    # TODO(mrutkows): detailed test/concatenation of independent values
    # into a URI
    if name is None or value is None:
        raise ValueError('Invalid name and/or value. Values cannot be None')
    return name + "?value=" + value
# TODO(mrutkows): validate any Tag's name?value= format
def is_valid(value):
    """Return True when *value* is a string; raise TypeError otherwise."""
    if isinstance(value, str):
        return True
    raise TypeError

View File

@@ -0,0 +1,135 @@
#
# Copyright 2013 OpenStack LLC
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testtools
from pycadf import attachment
from pycadf import event
from pycadf import geolocation
from pycadf import identifier
from pycadf import measurement
from pycadf import metric
from pycadf import reason
from pycadf import reporterstep
from pycadf import resource
from pycadf import tag
from pycadf import timestamp
class TestCADFSpec(testtools.TestCase):
    """Round-trip each CADF type through as_dict() and check every keyname.

    Each test builds a fully-populated instance and asserts that all of the
    type's declared KEYNAMES appear in the serialized dictionary.
    """

    def test_geolocation(self):
        geo = geolocation.Geolocation(id=identifier.generate_uuid(),
                                      latitude='43.6481 N',
                                      longitude='79.4042 W',
                                      elevation='0',
                                      accuracy='1',
                                      city='toronto',
                                      state='ontario',
                                      regionICANN='ca')

        dict_geo = geo.as_dict()
        for key in geolocation.GEO_KEYNAMES:
            self.assertIn(key, dict_geo)

    def test_metric(self):
        metric_val = metric.Metric(metricId=identifier.generate_uuid(),
                                   unit='b',
                                   name='bytes')

        dict_metric_val = metric_val.as_dict()
        for key in metric.METRIC_KEYNAMES:
            self.assertIn(key, dict_metric_val)

    def test_measurement(self):
        measure_val = measurement.Measurement(
            result='100',
            metric=metric.Metric(),
            metricId=identifier.generate_uuid(),
            calculatedBy=resource.Resource(typeURI='storage'))

        dict_measure_val = measure_val.as_dict()
        for key in measurement.MEASUREMENT_KEYNAMES:
            self.assertIn(key, dict_measure_val)

    def test_reason(self):
        reason_val = reason.Reason(reasonType='HTTP',
                                   reasonCode='200',
                                   policyType='poltype',
                                   policyId=identifier.generate_uuid())

        dict_reason_val = reason_val.as_dict()
        for key in reason.REASON_KEYNAMES:
            self.assertIn(key, dict_reason_val)

    def test_reporterstep(self):
        step = reporterstep.Reporterstep(
            role='observer',
            reporter=resource.Resource(typeURI='storage'),
            reporterId=identifier.generate_uuid(),
            reporterTime=timestamp.get_utc_now())

        dict_step = step.as_dict()
        for key in reporterstep.REPORTERSTEP_KEYNAMES:
            self.assertIn(key, dict_step)

    def test_attachment(self):
        attach = attachment.Attachment(typeURI='attachURI',
                                       content='content',
                                       name='attachment_name')

        dict_attach = attach.as_dict()
        for key in attachment.ATTACHMENT_KEYNAMES:
            self.assertIn(key, dict_attach)

    def test_resource(self):
        res = resource.Resource(typeURI='storage',
                                name='res_name',
                                domain='res_domain',
                                ref='res_ref',
                                geolocation=geolocation.Geolocation(),
                                geolocationId=identifier.generate_uuid())

        res.add_attachment(attachment.Attachment(typeURI='attachURI',
                                                 content='content',
                                                 name='attachment_name'))

        dict_res = res.as_dict()
        for key in resource.RESOURCE_KEYNAMES:
            self.assertIn(key, dict_res)

    def test_event(self):
        # Exercise every optional component: measurement, tag, attachment
        # and an extra reporterstep.
        ev = event.Event(eventType='activity',
                         id=identifier.generate_uuid(),
                         eventTime=timestamp.get_utc_now(),
                         initiator=resource.Resource(typeURI='storage'),
                         initiatorId=identifier.generate_uuid(),
                         action='read',
                         target=resource.Resource(typeURI='storage'),
                         targetId=identifier.generate_uuid(),
                         outcome='success',
                         reason=reason.Reason(reasonType='HTTP',
                                              reasonCode='200'),
                         severity='high')
        ev.add_measurement(measurement.Measurement(result='100'))
        ev.add_tag(tag.generate_name_value_tag('name', 'val'))
        ev.add_attachment(attachment.Attachment(typeURI='attachURI',
                                                content='content',
                                                name='attachment_name'))
        ev.add_reporterstep(reporterstep.Reporterstep(
            role='observer',
            reporterId=identifier.generate_uuid()))

        dict_ev = ev.as_dict()
        for key in event.EVENT_KEYNAMES:
            self.assertIn(key, dict_ev)

44
pycadf/timestamp.py Normal file
View File

@@ -0,0 +1,44 @@
# -*- encoding: utf-8 -*-
#
# Copyright 2013 IBM Corp.
#
# Author: Matt Rutkowski <mrutkows@us.ibm.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import pytz
from pycadf.openstack.common import log as logging
LOG = logging.getLogger(__name__)
# strftime pattern for CADF timestamps (ISO-8601-like with UTC offset).
TIME_FORMAT = "%Y-%m-%dT%H:%M:%S.%f%z"


def get_utc_now(timezone=None):
    """Return the current time as a TIME_FORMAT string.

    :param timezone: optional tz name understood by pytz; when given, the
        UTC timestamp is converted to that zone before formatting.
    """
    utc_datetime = pytz.utc.localize(datetime.datetime.utcnow())
    if timezone is not None:
        try:
            utc_datetime = utc_datetime.astimezone(pytz.timezone(timezone))
        except Exception as e:
            # Best-effort: an unknown/invalid timezone is logged and the
            # timestamp falls back to UTC rather than failing the caller.
            LOG.error('Unknown timezone: %s' % e)
    return utc_datetime.strftime(TIME_FORMAT)
# TODO(mrutkows): validate any cadf:Timestamp (type) record against
# CADF schema
def is_valid(value):
    """Return True when *value* is a string; raise ValueError otherwise."""
    if isinstance(value, str):
        return True
    raise ValueError('Timestamp should be a String')

View File

@@ -1 +1,6 @@
Babel>=0.9.6
iso8601>=0.1.4
netaddr
oslo.config>=1.1.0
pytz>=2010h
six