Prepare first commit

This patch prepares this repository for contributions:

* Fix PEP8 errors
* Add .gitreview file
* Fix build_sphinx
* Test py26

Change-Id: I4499297c94ad2a853e322753a7ee975f0f673472
This commit is contained in:
Emilien Macchi
2014-10-16 09:21:16 -04:00
committed by Julien Danjou
parent 0498c34c76
commit cc4df85845
16 changed files with 392 additions and 76 deletions

1
.gitignore vendored
View File

@@ -4,3 +4,4 @@ ChangeLog
*.egg-info
*.pyc
.testrepository
doc/build

4
.gitreview Normal file
View File

@@ -0,0 +1,4 @@
[gerrit]
host=review.openstack.org
port=29418
project=stackforge/monitoring-for-openstack.git

View File

@@ -1,4 +1,4 @@
[DEFAULT]
test_command=${PYTHON:-python} -m subunit.run discover oschecks $LISTOPT $IDOPTION
test_command=${PYTHON:-python} -m subunit.run discover ./oschecks/tests $LISTOPT $IDOPTION
test_id_option=--load-list $IDFILE
test_list_option=--list

258
doc/source/conf.py Normal file
View File

@@ -0,0 +1,258 @@
# -*- coding: utf-8 -*-
#
# monitoring-for-openstack documentation build configuration file, created by
# sphinx-quickstart on Tue Oct 28 18:03:41 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'monitoring-for-openstack'
copyright = u'2014, eNovance'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1'
# The full version, including alpha/beta/rc tags.
release = '1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'monitoring-for-openstackdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'monitoring-for-openstack.tex', u'monitoring-for-openstack Documentation',
u'eNovance', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'monitoring-for-openstack', u'monitoring-for-openstack Documentation',
[u'eNovance'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'monitoring-for-openstack', u'monitoring-for-openstack Documentation',
u'eNovance', 'monitoring-for-openstack', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False

22
doc/source/index.rst Normal file
View File

@@ -0,0 +1,22 @@
.. monitoring-for-openstack documentation master file, created by
sphinx-quickstart on Tue Oct 28 18:03:41 2014.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
Welcome to monitoring-for-openstack's documentation!
====================================================
Contents:
.. toctree::
:maxdepth: 2
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`

View File

@@ -34,12 +34,17 @@ def remaning(avail, total):
def interpret_output_df(output):
'''Parse the output of ceph health and return an exit code and
message compatible with nagios.'''
'''Parse the output of ceph health.
Return an exit code and message compatible with nagios.
'''
try:
data = json.loads(output)
except Exception:
return (1, 'CEPH WARNING: unable to parse ceph df %s' % traceback.format_exc())
return (1, 'CEPH WARNING: unable to parse ceph df %s' %
traceback.format_exc())
warn_percent = int(sys.argv[1]) if len(sys.argv) >= 2 else 85
crit_percent = int(sys.argv[2]) if len(sys.argv) >= 3 else 98
@@ -62,6 +67,7 @@ message compatible with nagios.'''
def check_ceph_df():
'Program entry point.'
try:
res = subprocess.check_output(["ceph", "df", "--format=json"],
stderr=subprocess.STDOUT)
@@ -77,8 +83,12 @@ def check_ceph_df():
def interpret_output_health(output):
'''Parse the output of ceph health and return an exit code and
message compatible with nagios.'''
'''Parse the output of ceph health.
Return an exit code and message compatible with nagios.
'''
tokens = output.split(' ')
if len(tokens) == 1:
tokens[0] = tokens[0].strip()
@@ -95,6 +105,7 @@ message compatible with nagios.'''
def check_ceph_health():
'Program entry point.'
try:
res = subprocess.check_output(["ceph", "health"],
stderr=subprocess.STDOUT)

View File

@@ -18,13 +18,13 @@
# License for the specific language governing permissions and limitations
# under the License.
import argparse
from datetime import datetime
import datetime
import logging
import os
import time
import urlparse
from cinderclient.client import Client
from cinderclient.client import Client # noqa
from cinderclient import exceptions
from oschecks import utils
@@ -83,8 +83,8 @@ class Novautils(object):
dt = datetime.utcnow()
td = dt - epoch
# return td.total_seconds()
return int((td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6)
/ 1e6)
return int((td.microseconds +
(td.seconds + td.days * 24 * 3600) * 10**6) / 1e6)
def check_connection(self, force=False):
if not self.connection_done or force:
@@ -164,7 +164,7 @@ class Novautils(object):
self.volume = self.nova_client.volumes.create(**conf)
except Exception as e:
self.msgs.append("Cannot create the volume %s (%s)"
% (args.volume_name, e))
% (volume_name, e))
def volume_ready(self, timeout):
if not self.msgs:
@@ -236,9 +236,9 @@ def _check_cinder_volume():
+ 'Public by default.')
parser.add_argument('--force_delete', action='store_true',
help='If matching volumes are found, delete them and add '
+ 'a notification in the message instead of getting out '
+ 'in critical state.')
help='If matching volumes are found, delete them and '
+ 'add a notification in the message instead of '
+ 'getting out in critical state.')
parser.add_argument('--api_version', metavar='api_version', type=str,
default='1',

View File

@@ -64,7 +64,7 @@ def _check_glance_image_exists():
help='name of images who must be available')
options, args, client = glance.setup()
#Flags resultat
# Flags resultat
valid_image = 0
count = len(list(client.images.list(**{"limit": options.req_count or 1})))
@@ -75,7 +75,7 @@ def _check_glance_image_exists():
if len(list(client.images.list(
**{"filters": {"name": image}}))) == 1:
valid_image = valid_image + 1
except:
except Exception:
pass
if options.req_count and count < options.req_count:

View File

@@ -19,14 +19,14 @@
# License for the specific language governing permissions and limitations
# under the License.
import argparse
from datetime import datetime
import datetime
import logging
import os
import re
import urlparse
from keystoneclient.v2_0 import client
from neutronclient.neutron import client as neutron
from neutronclient.neutron import client as neutron
from oschecks import utils
@@ -114,8 +114,8 @@ class Novautils(object):
dt = datetime.utcnow()
td = dt - epoch
# return td.total_seconds()
return int((td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6)
/ 1e6)
return int((td.microseconds + (td.seconds + td.days * 24 * 3600)
* 10**6) / 1e6)
def check_connection(self, force=False):
if not self.connection_done or force:
@@ -130,19 +130,18 @@ class Novautils(object):
def list_floating_ips(self):
if not self.all_floating_ips:
# TODO: my setup does not have pagination enable, so I didn't
# took this into account.
for floating_ip in self.nova_client.list_floatingips(
fields=['floating_ip_address', 'id'],
tenant_id=self.tenant_id)['floatingips']:
self.all_floating_ips.append(floating_ip)
return self.all_floating_ips
def check_existing_floatingip(self, floating_ip=None, delete=False):
count = 0
found_ips = []
for ip in self.list_floating_ips():
if floating_ip == 'all' or floating_ip.match(ip['floating_ip_address']):
if floating_ip == 'all' or floating_ip.match(
ip['floating_ip_address']):
if delete:
# asynchronous call, we do not check that it worked
self.nova_client.delete_floatingip(ip['id'])
@@ -151,7 +150,8 @@ class Novautils(object):
if count > 0:
if delete:
self.notifications.append("Found %d ip(s): %s"
% (count, '{' + ', '.join(found_ips) + '}'))
% (count, '{' + ', '.join(
found_ips) + '}'))
else:
self.msgs.append("Found %d ip(s): %s. "
% (count, ', '.join(found_ips))
@@ -162,37 +162,45 @@ class Novautils(object):
if not self.msgs:
if not self.network_id:
try:
self.network_id = self.nova_client.list_networks(name=router_name,fields='id')['networks'][0]['id']
except Exception as e:
self.msgs.append("Cannot find ext router named '%s'." % router_name)
self.network_id = self.nova_client.list_networks(
name=router_name, fields='id')['networks'][0]['id']
except Exception:
self.msgs.append("Cannot find ext router named '%s'."
% router_name)
def create_floating_ip(self):
if not self.msgs:
try:
body={'floatingip': {'floating_network_id': self.network_id}}
body = {'floatingip': {'floating_network_id': self.network_id}}
self.fip = self.nova_client.create_floatingip(body=body)
self.notifications.append("fip=%s" % self.fip['floatingip']['floating_ip_address'])
self.notifications.append(
"fip=%s" % self.fip['floatingip']['floating_ip_address'])
except Exception as e:
self.msgs.append("Cannot create a floating ip: %s" % e)
def delete_floating_ip(self):
if not self.msgs:
try:
self.nova_client.delete_floatingip(self.fip['floatingip']['id'])
except Exception as e:
self.msgs.append("Cannot remove floating ip %s" % self.fip['floatingip']['id'])
self.nova_client.delete_floatingip(
self.fip['floatingip']['id'])
except Exception:
self.msgs.append("Cannot remove floating ip %s"
% self.fip['floatingip']['id'])
def fip_type(string):
if string == 'all':
return 'all'
else:
return re.compile(string)
def _check_neutron_floating_ip():
parser = argparse.ArgumentParser(
description='Check an Floating ip creation. Note that\'s it\'s able to delete *all* floating ips from a account, so ensure that nothing important is running on the specified account.')
description='Check an Floating ip creation. Note that it is able '
+ 'to delete *all* floating ips from a account, so '
+ 'ensure that nothing important is running on the '
+ 'specified account.')
parser.add_argument('--auth_url', metavar='URL', type=str,
default=os.getenv('OS_AUTH_URL'),
help='Keystone URL')
@@ -218,24 +226,25 @@ def _check_neutron_floating_ip():
+ 'Public by default.')
parser.add_argument('--force_delete', action='store_true',
help='If matching floating ip are found, delete them and add '
+ 'a notification in the message instead of getting out '
+ 'in critical state.')
help='If matching floating ip are found, delete them '
+ 'and add a notification in the message instead of '
+ 'getting out in critical state.')
parser.add_argument('--timeout', metavar='timeout', type=int,
default=120,
help='Max number of second to create/delete a floating ip '
+ '(120 by default).')
help='Max number of second to create/delete a '
+ 'floating ip (120 by default).')
parser.add_argument('--floating_ip', metavar='floating_ip', type=fip_type,
default=None,
help='Regex of IP(s) to check for existance. '
+ 'This value can be "all" for conveniance (match all ip). '
+ 'This permit to avoid certain floating ip to be kept. '
+ 'Its default value prevents the removal of any existing floating ip')
+ 'This value can be "all" for conveniance (match '
+ 'all ip). This permit to avoid certain floating '
+ 'ip to be kept. Its default value prevents the '
+ 'removal of any existing floating ip')
parser.add_argument('--ext_router_name', metavar='ext_router_name', type=str,
default='public',
parser.add_argument('--ext_router_name', metavar='ext_router_name',
type=str, default='public',
help='Name of the "public" router (public by default)')
parser.add_argument('--verbose', action='count',
@@ -257,14 +266,16 @@ def _check_neutron_floating_ip():
utils.critical("Authentication error: %s\n" % e)
try:
endpoint = nova_client.service_catalog.get_endpoints('network')['network'][0][args.endpoint_type]
endpoint = nova_client.service_catalog.get_endpoints(
'network')['network'][0][args.endpoint_type]
if args.endpoint_url:
endpoint = mangle_url(endpoint, args.endpoint_url)
token = nova_client.service_catalog.get_token()['id']
if args.verbose:
logging.basicConfig(level=logging.DEBUG)
neutron_client = neutron.Client('2.0', endpoint_url=endpoint, token=token)
neutron_client = neutron.Client('2.0', endpoint_url=endpoint,
token=token)
except Exception as e:
utils.critical("Error creating neutron object: %s\n" % e)

View File

@@ -19,12 +19,12 @@
# License for the specific language governing permissions and limitations
# under the License.
import argparse
from datetime import datetime
import datetime
import logging
import os
import time
from novaclient.client import Client
from novaclient.client import Client # noqa
from novaclient import exceptions
from six.moves import urllib
@@ -83,13 +83,13 @@ class Novautils(object):
# now, after checking http://stackoverflow.com/a/16307378,
# and http://stackoverflow.com/a/8778548 made my mind to this approach
@staticmethod
def totimestamp(dt=None, epoch=datetime(1970, 1, 1)):
def totimestamp(dt=None, epoch=datetime.datetime(1970, 1, 1)):
if not dt:
dt = datetime.utcnow()
dt = datetime.datetime.utcnow()
td = dt - epoch
# return td.total_seconds()
return int((td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6)
/ 1e6)
return int((td.microseconds + (td.seconds + td.days * 24 * 3600)
* 10**6) / 1e6)
def check_connection(self, force=False):
if not self.connection_done or force:
@@ -309,9 +309,9 @@ def _check_nova_instance():
% default_instance_name)
parser.add_argument('--force_delete', action='store_true',
help='If matching instances are found delete them and add'
+ 'a notification in the message instead of getting out'
+ 'in critical state.')
help='If matching instances are found delete them and '
+ 'add a notification in the message instead of '
+ 'getting out in critical state.')
parser.add_argument('--api_version', metavar='api_version', type=str,
default='2',
@@ -324,8 +324,8 @@ def _check_nova_instance():
parser.add_argument('--timeout_delete', metavar='timeout_delete', type=int,
default=45,
help='Max number of second to delete an existing instance'
+ '(45 by default).')
help='Max number of second to delete an existing '
+ 'instance (45 by default).')
parser.add_argument('--insecure', action='store_true',
help="The server's cert will not be verified")

View File

@@ -19,9 +19,9 @@
# under the License.
import argparse
import os
import shlex
import subprocess
import os
try:
import utils

View File

@@ -24,20 +24,21 @@ class TestCephHealth(unittest.TestCase):
def test_interpret_output_ok(self):
exit_code, message = ceph.interpret_output_health('HEALTH_OK message')
self.assertEquals(exit_code, 0)
self.assertEquals(message, 'CEPH OK: message')
self.assertEqual(exit_code, 0)
self.assertEqual(message, 'CEPH OK: message')
def test_interpret_output_warn(self):
exit_code, message = ceph.interpret_output_health('HEALTH_WARN message')
self.assertEquals(exit_code, 1)
self.assertEquals(message, 'CEPH WARNING: message')
exit_code, message = ceph.interpret_output_health('HEALTH_WARN '
'message')
self.assertEqual(exit_code, 1)
self.assertEqual(message, 'CEPH WARNING: message')
def test_interpret_output_critical(self):
exit_code, message = ceph.interpret_output_health('HEALTH_ERR message')
self.assertEquals(exit_code, 2)
self.assertEquals(message, 'CEPH CRITICAL: message')
self.assertEqual(exit_code, 2)
self.assertEqual(message, 'CEPH CRITICAL: message')
def test_interpret_output_unknown(self):
exit_code, message = ceph.interpret_output_health('strange message')
self.assertEquals(exit_code, 3)
self.assertEquals(message, 'CEPH UNKNOWN: strange message')
self.assertEqual(exit_code, 3)
self.assertEqual(message, 'CEPH UNKNOWN: strange message')

View File

@@ -21,10 +21,10 @@
import copy
import itertools
import time
import traceback
import os
import sys
import time
import traceback
import psutil

View File

@@ -14,6 +14,7 @@ classifier =
Operating System :: POSIX :: Linux
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.6
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3.3
Programming Language :: Python :: 3.4
@@ -34,7 +35,7 @@ scripts =
legacy/oschecks-check_swift_object_servers
legacy/oschecks-check_swift_recon
legacy/oschecks-check_swift_ring_usage
[entry_points]
console_scripts =
oschecks-check_amqp = oschecks.amqp:main
@@ -49,6 +50,11 @@ console_scripts =
oschecks-check_keystone_api = oschecks.keystone:check_keystone_api
oschecks-check_neutron_api = oschecks.neutron:check_neutron_api
oschecks-check_neutron_floating_ip = oschecks.neutron:check_neutron_floating_ip
oschecks-check_nova_api = oschecks.nova:check_nova_api
oschecks-check_nova_api = oschecks.nova:check_nova_api
oschecks-check_nova_instance = oschecks.nova:check_nova_instance
oschecks-pacemaker_host_check = oschecks.pacemaker_host_check:pacemaker_host_check
[build_sphinx]
all_files = 1
build-dir = doc/build
source-dir = doc/source

View File

@@ -1 +1,3 @@
testrepository
discover
sphinx

View File

@@ -1,5 +1,5 @@
[tox]
envlist = py27,py33,py34,pep8
envlist = py26,py27,py33,py34,pep8
[testenv]
usedevelop = True
@@ -18,5 +18,5 @@ commands = flake8
commands = {posargs}
[flake8]
exclude = .tox
exclude = .tox,doc
show-source = true