
This enables basic clustering functionality. We add: tools/cluster/cluster/daemon.py: A server that handles validation of cluster passwords. tools/cluster/cluster/client.py: A client for this server. Important Note: This prototype does not support TLS, and the functionality in the client and server is basic. Before we roll clustering out to production, we need to have those two chat over TLS, and be much more careful about verifying credentials. Also included ... Various fixes and changes to the init script and config templates to support cluster configuration, and allow for the fact that we may have endpoint references for two network IPs. Updates to snapcraft.yaml, adding the new tooling. A more formalized config infrastructure. It's still a TODO to move the specification out of the implicit definition in the install hook, and into a nice, explicit, well documented yaml file. Added nesting to the Question classes in the init script, as well as strings pointing at config keys, rather than having the config be implicitly indicated by the Question subclass' name. (This allows us to put together a config spec that doesn't require the person reading the spec to understand what Questions are, and how they are implemented.) Renamed and unified the "unit" and "lint" tox environments, to allow for the multiple Python tools that we want to lint and test. Added hooks in the init script to make it possible to do automated testing, and added an automated test for a cluster. Run with "tox -e cluster". Added cirros image to snap, to work around sporadic issues downloading it from download.cirros.net. Removed ping logic from snap, to work around failures in the gate. Need to add it back in once we fix them. Change-Id: I44ccd16168a7ed41486464df8c9e22a14d71ccfd
53 lines
1.2 KiB
Python
Executable File
53 lines
1.2 KiB
Python
Executable File
#!/usr/bin/env python
|
|
"""
|
|
control_test.py
|
|
|
|
This is a test to verify that a control node gets setup properly. We verify:
|
|
|
|
1) We can install the snap.
|
|
2) Nova services are not running
|
|
3) Other essential services are running
|
|
4) TODO: the horizon dashboard works.
|
|
|
|
"""
|
|
|
|
import sys
|
|
import os
|
|
|
|
import unittest
|
|
|
|
sys.path.append(os.getcwd())
|
|
|
|
from tests.framework import Framework, check, check_output # noqa E402
|
|
|
|
|
|
class TestControlNode(Framework):
    """Verify that a control node comes up with the expected services.

    The snap install/init machinery is inherited from Framework.
    INIT_FLAG tells the init script to configure this machine as a
    control node.
    """

    # Flag passed to the init script to select the control-node role.
    INIT_FLAG = 'control'

    def test_control_node(self):
        """A control node has all services running, so this shouldn't be any
        different than our standard setup.

        """
        print("Checking output of services ...")
        # Ask systemd for the status of every microstack snap service.
        # self.PREFIX is supplied by Framework (presumably a command
        # prefix such as ssh/multipass exec -- TODO confirm).
        services = check_output(
            *self.PREFIX, 'systemctl', 'status', 'snap.microstack.*',
            '--no-page')

        print("services: @@@")
        print(services)

        # assertIn reports the actual `services` text on failure,
        # unlike assertTrue('x' in services), which only says
        # "False is not true".
        self.assertIn('neutron-', services)
        self.assertIn('keystone-', services)

        self.passed = True
|
|
|
|
|
|
if __name__ == '__main__':
    # Suppress deprecation warnings and warnings about unclosed sockets
    # when running this suite directly.  (TODO: set up a selenium server
    # so that we can move from PhantomJS, which is deprecated, to
    # Selenium headless.)
    unittest.main(warnings='ignore')