diff --git a/.gitignore b/.gitignore
index fddd160c..11a4e84b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -12,6 +12,7 @@ eggs
sdist
# Unit test / coverage reports
+.cache
.coverage
.tox
.testrepository
diff --git a/README.rst b/README.rst
index ea373ad3..08b8167e 100644
--- a/README.rst
+++ b/README.rst
@@ -8,7 +8,7 @@ Team and repository tags
.. Change things from this point on
OpenStack Doc Tools
-*******************
+~~~~~~~~~~~~~~~~~~~
This repository contains tools used by the OpenStack Documentation
project.
@@ -16,8 +16,12 @@ project.
For more details, see the `OpenStack Documentation Contributor Guide
`_.
+* License: Apache License, Version 2.0
+* Source: https://git.openstack.org/cgit/openstack/openstack-doc-tools
+* Bugs: https://bugs.launchpad.net/openstack-doc-tools
+
Prerequisites
-=============
+-------------
You need to have Python 2.7 installed for using the tools.
@@ -57,12 +61,7 @@ On Ubuntu::
$ apt-get install libxml2-dev libxslt-dev
-* License: Apache License, Version 2.0
-* Source: https://git.openstack.org/cgit/openstack/openstack-doc-tools
-* Bugs: https://bugs.launchpad.net/openstack-doc-tools
-
-
Regenerating config option tables
-=================================
+---------------------------------
See :ref:`autogenerate_config_docs`.
diff --git a/doc/source/conf.py b/doc/source/conf.py
index ce035dfb..8844bdcc 100644
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -14,6 +14,8 @@
import os
import sys
+import openstackdocstheme
+
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ----------------------------------------------------
@@ -37,7 +39,7 @@ master_doc = 'index'
# General information about the project.
project = u'openstack-doc-tools'
-copyright = u'2014, OpenStack Foundation'
+copyright = u'2017, OpenStack Foundation'
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
@@ -51,10 +53,13 @@ pygments_style = 'sphinx'
# -- Options for HTML output --------------------------------------------------
-# The theme to use for HTML and HTML Help pages. Major themes that come with
-# Sphinx are currently 'default' and 'sphinxdoc'.
-# html_theme_path = ["."]
-# html_theme = '_theme'
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+html_theme = 'openstackdocs'
+
+# Add any paths that contain custom themes here, relative to this directory.
+html_theme_path = [openstackdocstheme.get_html_theme_path()]
+
# html_static_path = ['static']
# Output file base name for HTML help builder.
diff --git a/doc/source/readme.rst b/doc/source/doc-tools-readme.rst
similarity index 100%
rename from doc/source/readme.rst
rename to doc/source/doc-tools-readme.rst
diff --git a/doc/source/index.rst b/doc/source/index.rst
index f8133b19..0e2205b3 100644
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -1,3 +1,4 @@
+==============================================
Welcome to openstack-doc-tool's documentation!
==============================================
@@ -6,16 +7,17 @@ Contents:
.. toctree::
:maxdepth: 2
- readme
- autogenerate_config_docs
- release_notes
+ doc-tools-readme
installation
usage
+ autogenerate_config_docs
+ man/openstack-doc-test
+ sitemap-readme
+ release_notes
Indices and tables
-==================
+~~~~~~~~~~~~~~~~~~
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
-
diff --git a/doc/source/installation.rst b/doc/source/installation.rst
index fb27d945..8c21ad67 100644
--- a/doc/source/installation.rst
+++ b/doc/source/installation.rst
@@ -2,11 +2,15 @@
Installation
============
-At the command line::
+At the command line:
- $ pip install openstack-doc-tools
+ .. code-block:: console
-Or, if you have virtualenvwrapper installed::
+ $ pip install openstack-doc-tools
- $ mkvirtualenv openstack-doc-tools
- $ pip install openstack-doc-tools
+Or, if you have virtualenvwrapper installed:
+
+ .. code-block:: console
+
+ $ mkvirtualenv openstack-doc-tools
+ $ pip install openstack-doc-tools
diff --git a/doc/source/man/openstack-doc-test.rst b/doc/source/man/openstack-doc-test.rst
index 93745bcf..ab791f43 100644
--- a/doc/source/man/openstack-doc-test.rst
+++ b/doc/source/man/openstack-doc-test.rst
@@ -114,4 +114,4 @@ Bugs
* openstack-doc-tools is hosted on Launchpad so you can view current
bugs at
- `Bugs : openstack-manuals `__
+  `Bugs : openstack-doc-tools <https://bugs.launchpad.net/openstack-doc-tools>`__
diff --git a/doc/source/release_notes.rst b/doc/source/release_notes.rst
index b3ee6814..764448b9 100644
--- a/doc/source/release_notes.rst
+++ b/doc/source/release_notes.rst
@@ -1,2 +1 @@
-
.. include:: ../../RELEASE_NOTES.rst
diff --git a/doc/source/sitemap-readme.rst b/doc/source/sitemap-readme.rst
new file mode 100644
index 00000000..8038a36a
--- /dev/null
+++ b/doc/source/sitemap-readme.rst
@@ -0,0 +1 @@
+.. include:: ../../sitemap/README.rst
diff --git a/doc/source/usage.rst b/doc/source/usage.rst
index c1779bd1..61126a51 100644
--- a/doc/source/usage.rst
+++ b/doc/source/usage.rst
@@ -1,7 +1,9 @@
-========
+=====
Usage
-========
+=====
-To use openstack-doc-tools in a project::
+To use openstack-doc-tools in a project:
- import os_doc_tools
+ .. code-block:: python
+
+ import os_doc_tools
diff --git a/sitemap/README.rst b/sitemap/README.rst
index 925c6ca1..79e50e23 100644
--- a/sitemap/README.rst
+++ b/sitemap/README.rst
@@ -2,46 +2,80 @@
Sitemap Generator
=================
-This script crawls all available sites on http://docs.openstack.org and extracts
-all URLs. Based on the URLs the script generates a sitemap for search engines
-according to the protocol described at http://www.sitemaps.org/protocol.html.
+This script crawls all available sites on http://docs.openstack.org and
+extracts all URLs. Based on the URLs the script generates a sitemap for search
+engines according to the `sitemaps protocol
+<http://www.sitemaps.org/protocol.html>`_.
Installation
-============
+~~~~~~~~~~~~
-To install the needed modules you can use pip or the package management system included
-in your distribution. When using the package management system maybe the name of the
-packages differ. Installation in a virtual environment is recommended.
+To install the needed modules you can use pip or the package management system
+included in your distribution. When using the package management system, the
+package names may differ. Installation in a virtual environment is
+recommended.
- $ virtualenv venv
- $ source venv/bin/activate
- $ pip install -r requirements.txt
+.. code-block:: console
-When using pip it's maybe necessary to install some development packages.
-For example on Ubuntu 16.04 install the following packages.
+ $ virtualenv venv
+ $ source venv/bin/activate
+ $ pip install -r requirements.txt
- $ sudo apt install gcc libssl-dev python-dev python-virtualenv
+When using pip, you may also need to install some development packages. For
+example, on Ubuntu 16.04 install the following packages:
+
+.. code-block:: console
+
+ $ sudo apt install gcc libssl-dev python-dev python-virtualenv
Usage
-=====
+~~~~~
-To generate a new sitemap file simply run the spider using the
-following command. It will take several minutes to crawl all available sites
-on http://docs.openstack.org. The result will be available in the file
-``sitemap_docs.openstack.org.xml``.
+To generate a new sitemap file, change into your local clone of the
+``openstack/openstack-doc-tools`` repository and run the following commands:
- $ scrapy crawl sitemap
+.. code-block:: console
-It's also possible to crawl other sites using the attribute ``domain``.
+ $ cd sitemap
+ $ scrapy crawl sitemap
-For example to crawl http://developer.openstack.org use the following command.
-The result will be available in the file ``sitemap_developer.openstack.org.xml``.
+The script takes several minutes to crawl all available
+sites on http://docs.openstack.org. The result is available in the
+``sitemap_docs.openstack.org.xml`` file.
- $ scrapy crawl sitemap -a domain=developer.openstack.org
+Options
+~~~~~~~
-To write log messages into a file append the parameter ``-s LOG_FILE=scrapy.log``.
+domain=URL
-It is possible to define a set of additional start URLs using the attribute
-``urls``. Separate multiple URLs with ``,``.
+ Sets the ``domain`` to crawl. Default is ``docs.openstack.org``.
- $ scrapy crawl sitemap -a domain=developer.openstack.org -a urls="http://developer.openstack.org/de/api-guide/quick-start/"
+ For example, to crawl http://developer.openstack.org use the following
+ command:
+
+ .. code-block:: console
+
+ $ scrapy crawl sitemap -a domain=developer.openstack.org
+
+ The result is available in the ``sitemap_developer.openstack.org.xml`` file.
+
+urls=URL
+
+ You can define a set of additional start URLs using the ``urls`` attribute.
+ Separate multiple URLs with ``,``.
+
+ For example:
+
+ .. code-block:: console
+
+ $ scrapy crawl sitemap -a domain=developer.openstack.org -a urls="http://developer.openstack.org/de/api-guide/quick-start/"
+
+LOG_FILE=FILE
+
+ Write log messages to the specified file.
+
+ For example, to write to ``scrapy.log``:
+
+ .. code-block:: console
+
+ $ scrapy crawl sitemap -s LOG_FILE=scrapy.log
diff --git a/sitemap/generator/pipelines.py b/sitemap/generator/pipelines.py
index f06d28a1..50f385b6 100644
--- a/sitemap/generator/pipelines.py
+++ b/sitemap/generator/pipelines.py
@@ -69,7 +69,7 @@ class ExportSitemap(object):
def spider_opened(self, spider):
output = open(os.path.join(os.getcwd(), 'sitemap_%s.xml'
- % spider.domain), 'w')
+ % spider.domain), 'w')
self.files[spider] = output
self.exporter = SitemapItemExporter(output, item_element='url',
root_element='urlset')
@@ -80,7 +80,7 @@ class ExportSitemap(object):
output = self.files.pop(spider)
output.close()
tree = lxml.etree.parse(os.path.join(os.getcwd(), "sitemap_%s.xml"
- % spider.domain))
+ % spider.domain))
with open(os.path.join(os.getcwd(), "sitemap_%s.xml" % spider.domain),
'w') as pretty:
pretty.write(lxml.etree.tostring(tree, pretty_print=True))
diff --git a/sitemap/generator/spiders/sitemap_file.py b/sitemap/generator/spiders/sitemap_file.py
index 9d895192..a81ca7f9 100644
--- a/sitemap/generator/spiders/sitemap_file.py
+++ b/sitemap/generator/spiders/sitemap_file.py
@@ -11,7 +11,10 @@
# under the License.
import time
-import urlparse
+try:
+ import urlparse
+except ImportError:
+ import urllib.parse as urlparse
from scrapy import item
from scrapy.linkextractors import LinkExtractor
@@ -41,7 +44,8 @@ class SitemapSpider(spiders.CrawlSpider):
'juno',
'kilo',
'liberty',
- 'mitaka'
+ 'mitaka',
+ 'newton',
]])
rules = [
diff --git a/sitemap/requirements.txt b/sitemap/requirements.txt
deleted file mode 100644
index dc264785..00000000
--- a/sitemap/requirements.txt
+++ /dev/null
@@ -1 +0,0 @@
-scrapy>=1.0.0
diff --git a/sitemap/test/__init__.py b/sitemap/test/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/sitemap/test/generator/test_items.py b/sitemap/test/generator/test_items.py
deleted file mode 100644
index aab71f11..00000000
--- a/sitemap/test/generator/test_items.py
+++ /dev/null
@@ -1,37 +0,0 @@
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-import mock
-from sitemap.generator import items
-import unittest
-
-
-class TestSitemapItem(unittest.TestCase):
-
- def test_class_type(self):
- self.assertTrue(type(items.SitemapItem) is items.scrapy.item.ItemMeta)
-
- def test_class_supports_fields(self):
- with mock.patch.object(items.scrapy.item, 'Field'):
- a = items.SitemapItem()
-
- supported_fields = ['loc', 'lastmod', 'priority', 'changefreq']
- for field in supported_fields:
- a[field] = field
-
- not_supported_fields = ['some', 'random', 'fields']
- for field in not_supported_fields:
- with self.assertRaises(KeyError):
- a[field] = field
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/test-requirements.txt b/test-requirements.txt
index e6a71cda..56ea1b5b 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -11,8 +11,8 @@ doc8 # Apache-2.0
pylint==1.4.5 # GPLv2
reno>=1.8.0 # Apache-2.0
+openstackdocstheme>=1.5.0 # Apache-2.0
oslosphinx>=4.7.0 # Apache-2.0
-
testrepository>=0.0.18 # Apache-2.0/BSD
# mock object framework
diff --git a/os_doc_tools/test/__init__.py b/test/__init__.py
similarity index 100%
rename from os_doc_tools/test/__init__.py
rename to test/__init__.py
diff --git a/os_doc_tools/test/test_index.py b/test/test_index.py
similarity index 100%
rename from os_doc_tools/test/test_index.py
rename to test/test_index.py
diff --git a/os_doc_tools/test/test_jsoncheck.py b/test/test_jsoncheck.py
similarity index 100%
rename from os_doc_tools/test/test_jsoncheck.py
rename to test/test_jsoncheck.py
diff --git a/sitemap/test/generator/test_pipelines.py b/test/test_pipelines.py
similarity index 93%
rename from sitemap/test/generator/test_pipelines.py
rename to test/test_pipelines.py
index 30b71267..62fde0ec 100644
--- a/sitemap/test/generator/test_pipelines.py
+++ b/test/test_pipelines.py
@@ -78,26 +78,22 @@ class TestExportSitemap(unittest.TestCase):
def test_spider_opened_calls_open(self):
with mock.patch.object(pipelines, 'open',
return_value=None) as mocked_open:
- with mock.patch.object(pipelines,
- 'SitemapItemExporter'):
+ with mock.patch.object(pipelines, 'SitemapItemExporter'):
self.export_sitemap.spider_opened(self.spider)
self.assertTrue(mocked_open.called)
def test_spider_opened_assigns_spider(self):
prev_len = len(self.export_sitemap.files)
- with mock.patch.object(pipelines, 'open',
- return_value=None):
- with mock.patch.object(pipelines,
- 'SitemapItemExporter'):
+ with mock.patch.object(pipelines, 'open', return_value=None):
+ with mock.patch.object(pipelines, 'SitemapItemExporter'):
self.export_sitemap.spider_opened(self.spider)
after_len = len(self.export_sitemap.files)
self.assertTrue(after_len - prev_len, 1)
def test_spider_opened_instantiates_exporter(self):
- with mock.patch.object(pipelines, 'open',
- return_value=None):
+ with mock.patch.object(pipelines, 'open', return_value=None):
with mock.patch.object(pipelines,
'SitemapItemExporter') as mocked_exporter:
self.export_sitemap.spider_opened(self.spider)
@@ -105,8 +101,7 @@ class TestExportSitemap(unittest.TestCase):
self.assertTrue(mocked_exporter.called)
def test_spider_opened_exporter_starts_exporting(self):
- with mock.patch.object(pipelines, 'open',
- return_value=None):
+ with mock.patch.object(pipelines, 'open', return_value=None):
with mock.patch.object(pipelines.SitemapItemExporter,
'start_exporting') as mocked_start:
self.export_sitemap.spider_opened(self.spider)
diff --git a/sitemap/test/generator/spiders/test_sitemap_file.py b/test/test_sitemap_file.py
similarity index 79%
rename from sitemap/test/generator/spiders/test_sitemap_file.py
rename to test/test_sitemap_file.py
index ea929452..98da8748 100644
--- a/sitemap/test/generator/spiders/test_sitemap_file.py
+++ b/test/test_sitemap_file.py
@@ -11,10 +11,30 @@
# under the License.
import mock
+import scrapy
from sitemap.generator.spiders import sitemap_file
import unittest
+class TestSitemapItem(unittest.TestCase):
+
+ def test_class_type(self):
+ self.assertTrue(type(sitemap_file.SitemapItem) is scrapy.item.ItemMeta)
+
+ def test_class_supports_fields(self):
+ with mock.patch.object(scrapy.item, 'Field'):
+ a = sitemap_file.SitemapItem()
+
+ supported_fields = ['loc', 'lastmod', 'priority', 'changefreq']
+ for field in supported_fields:
+ a[field] = field
+
+ not_supported_fields = ['some', 'random', 'fields']
+ for field in not_supported_fields:
+ with self.assertRaises(KeyError):
+ a[field] = field
+
+
class TestSitemapSpider(unittest.TestCase):
def setUp(self):
@@ -38,16 +58,18 @@ class TestSitemapSpider(unittest.TestCase):
def test_parse_items_inits_sitemap(self):
response = mock.MagicMock()
- with mock.patch.object(sitemap_file.items,
+ with mock.patch.object(sitemap_file,
'SitemapItem') as mocked_sitemap_item:
- with mock.patch.object(sitemap_file, 'time'):
- self.spider.parse_item(response)
+ with mock.patch.object(sitemap_file.urlparse,
+ 'urlsplit'):
+ with mock.patch.object(sitemap_file, 'time'):
+ self.spider.parse_item(response)
self.assertTrue(mocked_sitemap_item.called)
def test_parse_items_gets_path(self):
response = mock.MagicMock()
- with mock.patch.object(sitemap_file.items, 'SitemapItem'):
+ with mock.patch.object(sitemap_file, 'SitemapItem'):
with mock.patch.object(sitemap_file.urlparse,
'urlsplit') as mocked_urlsplit:
with mock.patch.object(sitemap_file, 'time'):
@@ -60,7 +82,7 @@ class TestSitemapSpider(unittest.TestCase):
path = sitemap_file.urlparse.SplitResult(
scheme='https',
netloc='docs.openstack.com',
- path='/kilo',
+ path='/mitaka',
query='',
fragment=''
)
@@ -77,7 +99,7 @@ class TestSitemapSpider(unittest.TestCase):
path = sitemap_file.urlparse.SplitResult(
scheme='https',
netloc='docs.openstack.com',
- path='/mitaka',
+ path='/ocata',
query='',
fragment=''
)
@@ -94,7 +116,7 @@ class TestSitemapSpider(unittest.TestCase):
path = sitemap_file.urlparse.SplitResult(
scheme='https',
netloc='docs.openstack.com',
- path='/mitaka',
+ path='/ocata',
query='',
fragment=''
)
diff --git a/tox.ini b/tox.ini
index 68357797..a0f8924f 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,6 +1,6 @@
[tox]
minversion = 2.0
-envlist = py27,pep8
+envlist = py3,py27,pep8
skipsdist = True
[testenv]
@@ -9,7 +9,10 @@ install_command = {toxinidir}/tools/tox_install.sh {env:UPPER_CONSTRAINTS_FILE:h
setenv =
VIRTUAL_ENV={envdir}
CLIENT_NAME=openstack-doc-tools
-deps = -r{toxinidir}/test-requirements.txt
+# Install also sitemap scraping tool, not installed by default
+# therefore not in requirements file
+deps = scrapy>=1.0.0
+ -r{toxinidir}/test-requirements.txt
-r{toxinidir}/requirements.txt
commands = python setup.py testr --slowest --testr-args='{posargs}'
@@ -27,11 +30,14 @@ commands =
cleanup/remove_trailing_whitespaces.sh
[testenv:pylint]
-commands = pylint os_doc_tools cleanup
+commands = pylint os_doc_tools cleanup sitemap
[testenv:releasenotes]
commands = sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html
+[testenv:sitemap]
+# commands = functional test command goes here
+
[testenv:venv]
commands = {posargs}
@@ -44,3 +50,4 @@ builtins = _
exclude=.venv,.git,.tox,dist,*lib/python*,*egg,build,*autogenerate_config_docs/venv,*autogenerate_config_docs/sources
# 28 is currently the most complex thing we have
max-complexity=29
+ignore = H101