Bump hacking to 0.9.x series
Require at least 0.9.1 because 0.9.0 had a minor bug. This change also fixes all found issues. Rewrote some docstrings to fit H405. Change-Id: Ifeef11b783dbe70b2652d2b9ea29d5b20f69f2ce
This commit is contained in:
parent
8bfbbf01a1
commit
d449a6bcd2
|
@ -26,13 +26,12 @@ import os
|
|||
import pickle
|
||||
import re
|
||||
import sys
|
||||
|
||||
import git
|
||||
import stevedore
|
||||
import xml.sax.saxutils
|
||||
|
||||
import git
|
||||
from hooks import HOOKS
|
||||
import openstack.common.config.generator as generator
|
||||
import stevedore
|
||||
|
||||
|
||||
TABLE_HEADER = '''<?xml version="1.0" encoding="UTF-8"?>
|
||||
|
|
|
@ -31,9 +31,7 @@ PROJECTS = ['ceilometer', 'cinder', 'glance', 'heat', 'keystone', 'neutron',
|
|||
|
||||
|
||||
def setup_venv(branch, novenvupdate):
|
||||
"""Uses the autohelp-wrapper script to generate a virtualenv for a given
|
||||
branch.
|
||||
"""
|
||||
"""Setup a virtual environment for `branch`."""
|
||||
dirname = os.path.join('venv', branch.replace('/', '_'))
|
||||
if novenvupdate and os.path.exists(dirname):
|
||||
return
|
||||
|
@ -46,9 +44,7 @@ def setup_venv(branch, novenvupdate):
|
|||
|
||||
|
||||
def get_options(project, branch, args):
|
||||
"""Calls the autohelp script in a venv to get the list of known
|
||||
options.
|
||||
"""
|
||||
"""Get the list of known options for a project."""
|
||||
print("Working on %(project)s (%(branch)s)" % {'project': project,
|
||||
'branch': branch})
|
||||
# Checkout the required branch
|
||||
|
@ -136,9 +132,7 @@ def dbk_append_header(parent, cells):
|
|||
|
||||
|
||||
def diff(old_list, new_list):
|
||||
"""Compare the old and new lists of options to generate lists of modified
|
||||
options.
|
||||
"""
|
||||
"""Compare the old and new lists of options."""
|
||||
new_opts = []
|
||||
changed_default = []
|
||||
deprecated_opts = []
|
||||
|
|
|
@ -15,7 +15,6 @@
|
|||
import glob
|
||||
import os
|
||||
import sys
|
||||
|
||||
from xml.dom import minidom
|
||||
import xml.sax.saxutils
|
||||
|
||||
|
@ -118,25 +117,27 @@ def new_section_file(sample, current_section):
|
|||
replace(']', '').replace(':', '-')
|
||||
+ '.xml')
|
||||
section_file = open(section_filename, 'w')
|
||||
section_file.write('<?xml version="1.0" encoding="UTF-8"?>\n\
|
||||
<!-- The tool that generated this table lives in the\n\
|
||||
openstack-doc-tools repository. The editions made in\n\
|
||||
this file will *not* be lost if you run the script again. -->\n\
|
||||
<para xmlns="http://docbook.org/ns/docbook" version="5.0">\n\
|
||||
<table rules="all">\n\
|
||||
<caption>Description of configuration options for <literal>'
|
||||
+ current_section + '</literal> in <literal>'
|
||||
+ os.path.basename(sample) +
|
||||
'</literal></caption>\n\
|
||||
<col width="50%"/>\n\
|
||||
<col width="50%"/>\n\
|
||||
<thead>\n\
|
||||
<tr>\n\
|
||||
<th>Configuration option = Default value</th>\n\
|
||||
<th>Description</th>\n\
|
||||
</tr>\n\
|
||||
</thead>\n\
|
||||
<tbody>')
|
||||
section_file.write(
|
||||
('<?xml version="1.0" encoding="UTF-8"?>\n'
|
||||
' <!-- The tool that generated this table lives in the\n'
|
||||
' openstack-doc-tools repository. The editions made in\n'
|
||||
' this file will *not* be lost if you run the script '
|
||||
'again. -->\n'
|
||||
' <para xmlns="http://docbook.org/ns/docbook" version="5.0">\n'
|
||||
' <table rules="all">\n'
|
||||
' <caption>Description of configuration options for <literal>'
|
||||
+ current_section + '</literal> in <literal>'
|
||||
+ os.path.basename(sample) +
|
||||
'</literal></caption>\n'
|
||||
' <col width="50%"/>\n'
|
||||
' <col width="50%"/>\n'
|
||||
' <thead>\n'
|
||||
' <tr>\n'
|
||||
' <th>Configuration option = Default value</th>\n'
|
||||
' <th>Description</th>\n'
|
||||
' </tr>\n'
|
||||
' </thead>\n'
|
||||
' <tbody>'))
|
||||
return section_file
|
||||
|
||||
|
||||
|
@ -167,9 +168,9 @@ def create_new_tables(repo, verbose):
|
|||
"""
|
||||
if current_section != line.strip('#').strip():
|
||||
if section_file is not None:
|
||||
section_file.write('\n </tbody>\n\
|
||||
</table>\n\
|
||||
</para>')
|
||||
section_file.write(('\n </tbody>\n'
|
||||
' </table>\n'
|
||||
' </para>'))
|
||||
section_file.close()
|
||||
current_section = line.strip('#').strip()
|
||||
section_file = new_section_file(sample, current_section)
|
||||
|
@ -201,9 +202,9 @@ def create_new_tables(repo, verbose):
|
|||
'</td><td>' + option_desc + '</td>\n' +
|
||||
' </tr>')
|
||||
if section_file is not None:
|
||||
section_file.write('\n </tbody>\n\
|
||||
</table>\n\
|
||||
</para>')
|
||||
section_file.write(('\n </tbody>\n'
|
||||
' </table>\n'
|
||||
'</para>'))
|
||||
section_file.close()
|
||||
|
||||
|
||||
|
|
|
@ -170,17 +170,17 @@ def _list_opts(obj):
|
|||
|
||||
def print_group_opts(group, opts_by_module):
|
||||
print("[%s]" % group)
|
||||
print
|
||||
print('')
|
||||
global OPTION_COUNT
|
||||
for mod, opts in opts_by_module:
|
||||
OPTION_COUNT += len(opts)
|
||||
print('#')
|
||||
print('# Options defined in %s' % mod)
|
||||
print('#')
|
||||
print
|
||||
print('')
|
||||
for opt in opts:
|
||||
_print_opt(opt)
|
||||
print
|
||||
print('')
|
||||
|
||||
|
||||
def _get_my_ip():
|
||||
|
@ -246,7 +246,7 @@ def _print_opt(opt):
|
|||
opt_default = ['']
|
||||
for default in opt_default:
|
||||
print('#%s=%s' % (opt_name, default))
|
||||
print
|
||||
print('')
|
||||
except Exception:
|
||||
sys.stderr.write('Error in option "%s"\n' % opt_name)
|
||||
sys.exit(1)
|
||||
|
|
|
@ -36,8 +36,11 @@ import yaml
|
|||
|
||||
|
||||
class DownloadRetfListingFailed(Exception):
|
||||
"""Exception will be raised when the download of the RETF
|
||||
"""Exception for failed downloads of the RETF listing.
|
||||
|
||||
Exception will be raised when the download of the RETF
|
||||
listing failed or the destination file could not be written.
|
||||
|
||||
"""
|
||||
|
||||
pass
|
||||
|
|
|
@ -13,7 +13,6 @@
|
|||
# under the License.
|
||||
|
||||
'''
|
||||
|
||||
Usage:
|
||||
test.py [path]
|
||||
|
||||
|
@ -29,7 +28,6 @@ Requires:
|
|||
- Python 2.7 or greater
|
||||
- lxml Python library
|
||||
- Maven
|
||||
|
||||
'''
|
||||
|
||||
import gzip
|
||||
|
@ -43,10 +41,10 @@ import subprocess
|
|||
import sys
|
||||
|
||||
from lxml import etree
|
||||
from oslo.config import cfg
|
||||
|
||||
import os_doc_tools
|
||||
from os_doc_tools.common import check_output # noqa
|
||||
from oslo.config import cfg
|
||||
|
||||
|
||||
# These are files that are known to not pass syntax or niceness checks
|
||||
|
@ -120,19 +118,19 @@ def get_wadl_schema():
|
|||
|
||||
|
||||
def validation_failed(schema, doc):
|
||||
"""Return True if the parsed doc fails against the schema
|
||||
"""Return True if the parsed doc fails against the schema.
|
||||
|
||||
This will ignore validation failures of the type: IDREF attribute linkend
|
||||
references an unknown ID. This is because we are validating individual
|
||||
files that are being imported, and sometimes the reference isn't present
|
||||
in the current file.
|
||||
"""
|
||||
return not schema.validate(doc) and \
|
||||
any(log.type_name != "DTD_UNKNOWN_ID" for log in schema.error_log)
|
||||
return (not schema.validate(doc) and
|
||||
any(log.type_name != "DTD_UNKNOWN_ID" for log in schema.error_log))
|
||||
|
||||
|
||||
def verify_section_tags_have_xmid(doc):
|
||||
"""Check that all section tags have an xml:id attribute
|
||||
"""Check that all section tags have an xml:id attribute.
|
||||
|
||||
Will throw an exception if there's at least one missing.
|
||||
"""
|
||||
|
@ -144,11 +142,13 @@ def verify_section_tags_have_xmid(doc):
|
|||
|
||||
|
||||
def verify_attribute_profiling(doc, attribute, known_values):
|
||||
"""Check for elements with attribute profiling set that conflicts with
|
||||
the attribute profiling of nodes below them in the DOM
|
||||
tree. This picks up cases where content is accidentally
|
||||
omitted via conflicting profiling. Checks known_values also for
|
||||
supported profiling values.
|
||||
"""Check for conflicts in attribute profiling.
|
||||
|
||||
Check for elements with attribute profiling set that conflicts with
|
||||
the attribute profiling of nodes below them in the DOM
|
||||
tree. This picks up cases where content is accidentally
|
||||
omitted via conflicting profiling. Checks known_values also for
|
||||
supported profiling values.
|
||||
"""
|
||||
|
||||
ns = {"docbook": "http://docbook.org/ns/docbook"}
|
||||
|
@ -234,8 +234,8 @@ def verify_whitespace_niceness(docfile):
|
|||
if affected_lines:
|
||||
if (msg):
|
||||
msg += "\n "
|
||||
msg += "trailing or unnecessary whitespaces found in lines: %s"\
|
||||
% (", ".join(affected_lines))
|
||||
msg += ("trailing or unnecessary whitespaces found in lines: %s"
|
||||
% (", ".join(affected_lines)))
|
||||
if tab_lines:
|
||||
if (msg):
|
||||
msg += "\n "
|
||||
|
@ -349,9 +349,7 @@ def filter_dirs(dirs):
|
|||
|
||||
|
||||
def check_deleted_files(rootdir, file_exceptions, verbose):
|
||||
"""Check whether files got deleted and verify that no other file
|
||||
references them.
|
||||
"""
|
||||
"""Checking that no removed files are referenced."""
|
||||
|
||||
print("Checking that no removed files are referenced...")
|
||||
deleted_files = get_modified_files(rootdir, "--diff-filter=D")
|
||||
|
@ -480,7 +478,7 @@ def is_testable_xml_file(path, exceptions):
|
|||
|
||||
filename = os.path.basename(path)
|
||||
return (filename.endswith('.xml') and not filename == 'pom.xml' and
|
||||
not filename in exceptions)
|
||||
filename not in exceptions)
|
||||
|
||||
|
||||
def is_testable_file(path, exceptions):
|
||||
|
@ -492,7 +490,7 @@ def is_testable_file(path, exceptions):
|
|||
filename = os.path.basename(path)
|
||||
return (filename.endswith(('.xml', '.xsd', '.xsl', '.wadl',
|
||||
'.xjb', '.json')) and
|
||||
not filename == 'pom.xml' and not filename in exceptions)
|
||||
not filename == 'pom.xml' and filename not in exceptions)
|
||||
|
||||
|
||||
def is_wadl(filename):
|
||||
|
@ -848,11 +846,14 @@ def build_book(book, publish_path, log_path):
|
|||
|
||||
|
||||
def is_book_master(filename):
|
||||
"""Returns True if filename is one of the special filenames used for the
|
||||
"""Check if a file is a book master file.
|
||||
|
||||
Returns True if filename is one of the special filenames used for the
|
||||
book master files.
|
||||
|
||||
We do not parse pom.xml for the includes directive to determine
|
||||
the top-level files and thus have to use a heuristic.
|
||||
|
||||
"""
|
||||
|
||||
return ((filename.startswith(('bk-', 'bk_', 'st-', 'api-'))
|
||||
|
|
|
@ -49,8 +49,9 @@ class IgnoreDuplicateUrls(object):
|
|||
|
||||
|
||||
class ExportSitemap(object):
|
||||
'''Write found URLs to a sitemap file, based on
|
||||
http://doc.scrapy.org/en/latest/topics/exporters.html.
|
||||
'''Write found URLs to a sitemap file.
|
||||
|
||||
Based on http://doc.scrapy.org/en/latest/topics/exporters.html.
|
||||
'''
|
||||
|
||||
def __init__(self):
|
||||
|
|
|
@ -14,11 +14,10 @@ import posixpath
|
|||
import time
|
||||
import urlparse
|
||||
|
||||
from generator import items
|
||||
from scrapy.contrib.linkextractors import sgml
|
||||
from scrapy.contrib import spiders
|
||||
|
||||
from generator import items
|
||||
|
||||
|
||||
class SitemapSpider(spiders.CrawlSpider):
|
||||
name = 'sitemap'
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
# Hacking already pins down pep8, pyflakes and flake8
|
||||
hacking>=0.8.0,<0.9
|
||||
hacking>=0.9.1,<0.10
|
||||
pylint==0.25.2
|
||||
sphinx>=1.2.1,<1.3
|
||||
|
|
Loading…
Reference in New Issue