Use SSHManager instead of context manager

- Function update_connection was added to SSHManager.
  You can use it to update an existing connection with
  new/custom credentials
- Function execute_on_remote was renamed to 'execute'
- Function execute_on_remote was added to SSHManager
  with new functionality. It is the same as
  run_on_remote_and_get_result
- Function json_deserialize was moved to SSHManager
- Code refactored according to the changes above.
  SSHManager is now used in the following places:
  - fuelweb_test.environment.py for all needed methods
  - patching.py for:
    - connect_admin_to_repo
    - connect_slaves_to_repo
  - decorators.py for update_rpm_packages
  - multiple_networks_hacks.py for all
Related-Bug: #1527847
Change-Id: I20dd5e02bb8833b3543780e0083576516a2893f6
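
Below is a minimal usage sketch of the new SSHManager API described above.
It is illustrative only and not part of the change itself; the commands are
placeholders, and the admin IP and credentials are assumed to come from the
test environment settings.

    from fuelweb_test.helpers.ssh_manager import SSHManager

    ssh = SSHManager()

    # execute() returns the raw result dict (exit_code, stdout, stderr)
    result = ssh.execute(ip=ssh.admin_ip, cmd='uptime')

    # execute_on_remote() additionally asserts the exit code (0 by default),
    # adds stdout_str/stderr_str and, with jsonify=True, parses stdout via
    # the moved _json_deserialize helper (the run_on_remote_and_get_result
    # behaviour mentioned above)
    parsed = ssh.execute_on_remote(
        ip=ssh.admin_ip,
        cmd='echo \'{"status": "ok"}\'',
        jsonify=True
    )['stdout_json']

    # update_connection() drops the cached connection for (ip, port) and
    # re-creates it with new/custom credentials
    ssh.update_connection(ip=ssh.admin_ip, login='root', password='r00tme')
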
@@ -33,6 +33,7 @@ from proboscis.asserts import assert_equal
 
 from fuelweb_test import logger
 from fuelweb_test import settings
+from fuelweb_test.helpers.ssh_manager import SSHManager
 from fuelweb_test.settings import MASTER_IS_CENTOS7
 from fuelweb_test.helpers.regenerate_repo import CustomRepo
 from fuelweb_test.helpers.utils import get_current_env
@@ -209,19 +210,26 @@ def update_rpm_packages(func):
             cmd = ("echo -e '[temporary]\nname=temporary\nbaseurl=file://{0}/"
                    "\ngpgcheck=0\npriority=1' > {1}").format(
                 settings.LOCAL_MIRROR_CENTOS, conf_file)
-            with environment.d_env.get_admin_remote() as remote:
-                environment.execute_remote_cmd(remote, cmd, exit_code=0)
+            SSHManager().execute_on_remote(
+                ip=SSHManager().admin_ip,
+                cmd=cmd
+            )
             update_command = 'yum clean expire-cache; yum update -y -d3'
-                result = remote.execute(update_command)
+            result = SSHManager().execute(
+                ip=SSHManager().admin_ip,
+                cmd=update_command
+            )
             logger.debug('Result of "yum update" command on master node: '
                          '{0}'.format(result))
             assert_equal(int(result['exit_code']), 0,
                          'Packages update failed, '
                          'inspect logs for details')
-                environment.execute_remote_cmd(remote,
-                                               cmd='rm -f {0}'
-                                               .format(conf_file),
-                                               exit_code=0)
+            SSHManager().execute_on_remote(
+                ip=SSHManager().admin_ip,
+                cmd='rm -f {0}'.format(conf_file)
+            )
         except Exception:
             logger.error("Could not update packages")
             raise

@@ -64,7 +64,7 @@ class BaseActions(object):
         if stdin is not None:
             cmd = 'echo "{0}" | {1}'.format(stdin, cmd)
 
-        result = self.ssh_manager.execute_on_remote(
+        result = self.ssh_manager.execute(
            ip=self.admin_ip,
            cmd=cmd
        )
@@ -98,7 +98,7 @@ class BaseActions(object):
            Standard output from console
        """
        cmd = 'dockerctl copy {0} {1}'.format(copy_from, copy_to)
-        result = self.ssh_manager.execute_on_remote(
+        result = self.ssh_manager.execute(
            ip=self.admin_ip,
            cmd=cmd
        )
@@ -113,7 +113,7 @@ class BaseActions(object):
 
    @property
    def is_container_ready(self):
-        result = self.ssh_manager.execute_on_remote(
+        result = self.ssh_manager.execute(
            ip=self.admin_ip,
            cmd="timeout 5 dockerctl check {0}".format(self.container)
        )
@@ -286,7 +286,7 @@ class AdminActions(BaseActions):
        # for admin node
        cmd = 'ntpdate -p 4 -t 0.2 -ub {0}'.format(router)
 
-        if not self.ssh_manager.execute_on_remote(ip=self.admin_ip,
+        if not self.ssh_manager.execute(ip=self.admin_ip,
                                        cmd=cmd)['exit_code']:
            # Local ntpd on the host is alive, so
            # remove all NTP sources and add the host instead.
@@ -342,7 +342,7 @@ class AdminActions(BaseActions):
 
    @logwrap
    def clean_generated_image(self, distro):
-        out = self.ssh_manager.execute_on_remote(
+        out = self.ssh_manager.execute(
            ip=self.admin_ip,
            cmd="find /var/www/nailgun/targetimages/ -name "
                "'env*{}*' -printf '%P\n'".format(distro.lower())
@@ -350,7 +350,7 @@ class AdminActions(BaseActions):
        images = ''.join(out)
 
        logger.debug("images are {}".format(images))
-        self.ssh_manager.execute_on_remote(
+        self.ssh_manager.execute(
            ip=self.admin_ip,
            cmd="find /var/www/nailgun/targetimages/ -name 'env*{}*'"
                " -delete".format(distro.lower())
@@ -362,7 +362,7 @@ class AdminActions(BaseActions):
        logger.info('Unpacking file')
        filename, ext = os.path.splitext(name)
        cmd = "tar -xpvf" if ext.endswith("tar") else "lrzuntar"
-        result = self.ssh_manager.execute_on_remote(
+        result = self.ssh_manager.execute(
            ip=node_ip,
            cmd='cd {0} && {2} {1}'.format(path, name, cmd)
        )
@@ -407,7 +407,7 @@ class AdminActions(BaseActions):
 
    def get_fuel_settings(self):
        cmd = 'cat {cfg_file}'.format(cfg_file=hlp_data.FUEL_SETTINGS_YAML)
-        result = self.ssh_manager.execute_on_remote(
+        result = self.ssh_manager.execute(
            ip=self.admin_ip,
            cmd=cmd
        )
@@ -424,7 +424,7 @@ class AdminActions(BaseActions):
                                default_style='"',
                                default_flow_style=False),
                    hlp_data.FUEL_SETTINGS_YAML)
-        result = self.ssh_manager.execute_on_remote(
+        result = self.ssh_manager.execute(
            ip=self.admin_ip,
            cmd=cmd
        )
@@ -666,7 +666,7 @@ class DockerActions(object):
        self.ssh_manager = SSHManager()
 
    def list_containers(self):
-        result = self.ssh_manager.execute_on_remote(
+        result = self.ssh_manager.execute(
            ip=self.ssh_manager.admin_ip,
            cmd='dockerctl list'
        )
@@ -692,7 +692,7 @@ class DockerActions(object):
                   .format(failed_containers, timeout))
 
    def restart_container(self, container):
-        self.ssh_manager.execute_on_remote(
+        self.ssh_manager.execute(
            ip=self.ssh_manager.admin_ip,
            cmd='dockerctl restart {0}'.format(container)
        )
@@ -706,7 +706,7 @@ class DockerActions(object):
 
    def execute_in_containers(self, cmd):
        for container in self.list_containers():
-            self.ssh_manager.execute_on_remote(
+            self.ssh_manager.execute(
                ip=self.ssh_manager.admin_ip,
                cmd="dockerctl shell {0} bash -c '{1}'".format(container, cmd)
            )

@@ -22,10 +22,11 @@
 from proboscis.asserts import assert_equal
 
 from fuelweb_test import logwrap
+from fuelweb_test.helpers.ssh_manager import SSHManager
 
 
 @logwrap
-def configure_second_admin_dhcp(remote, interface):
+def configure_second_admin_dhcp(ip, interface):
     dhcp_conf_file = '/etc/cobbler/dnsmasq.template'
     docker_start_file = '/usr/local/bin/start.sh'
     cmd = ("dockerctl shell cobbler sed '/^interface/a interface={0}' -i {1};"
@@ -34,13 +35,16 @@ def configure_second_admin_dhcp(remote, interface):
           "dockerctl shell cobbler cobbler sync").format(interface,
                                                          dhcp_conf_file,
                                                          docker_start_file)
-    result = remote.execute(cmd)
+    result = SSHManager().execute(
+        ip=ip,
+        cmd=cmd
+    )
     assert_equal(result['exit_code'], 0, ('Failed to add second admin '
                  'network to DHCP server: {0}').format(result))
 
 
 @logwrap
-def configure_second_admin_firewall(remote, network, netmask, interface,
+def configure_second_admin_firewall(ip, network, netmask, interface,
                                     master_ip):
     # Allow input/forwarding for nodes from the second admin network and
     # enable source NAT for UDP (tftp) and HTTP (proxy server) traffic
@@ -63,15 +67,20 @@ def configure_second_admin_firewall(remote, network, netmask, interface,
 
     for rule in rules:
         cmd = 'iptables {0}'.format(rule)
-        result = remote.execute(cmd)
+        result = SSHManager().execute(
+            ip=ip,
+            cmd=cmd
+        )
         assert_equal(result['exit_code'], 0,
                      ('Failed to add firewall rule for second admin net '
                       'on master node: {0}, {1}').format(rule, result))
 
     # Save new firewall configuration
     cmd = 'service iptables save'
-    result = remote.execute(cmd)
+    result = SSHManager().execute(
+        ip=ip,
+        cmd=cmd
+    )
     assert_equal(result['exit_code'], 0,
                  ('Failed to save firewall configuration on master node:'
                   ' {0}').format(result))

@@ -30,7 +30,7 @@ from proboscis.asserts import assert_true
 
 from fuelweb_test import logger
 from fuelweb_test import settings
+from fuelweb_test.helpers.ssh_manager import SSHManager
 
 patching_validation_schema = {
     'type': {
@@ -304,9 +304,11 @@ def connect_slaves_to_repo(environment, nodes, repo_name):
     ]
 
     for slave in nodes:
-        with environment.d_env.get_ssh_to_remote(slave['ip']) as remote:
         for cmd in cmds:
-            environment.execute_remote_cmd(remote, cmd, exit_code=0)
+            SSHManager().execute_on_remote(
+                ip=slave['ip'],
+                cmd=cmd
+            )
 
 
 def connect_admin_to_repo(environment, repo_name):
@@ -328,9 +330,11 @@ def connect_admin_to_repo(environment, repo_name):
         "yum check-update; [[ $? -eq 100 || $? -eq 0 ]]"
     ]
 
-    with environment.d_env.get_admin_remote() as remote:
     for cmd in cmds:
-        environment.execute_remote_cmd(remote, cmd, exit_code=0)
+        SSHManager().execute_on_remote(
+            ip=SSHManager().admin_ip,
+            cmd=cmd
+        )
 
 
 def update_packages(environment, remote, packages, exclude_packages=None):

@@ -246,7 +246,7 @@ class CustomRepo(object):
                         .format(pkgs_local_path + path_suff,
                                 self.custom_pkgs_mirror,
                                 pkg["filename:"])
-            wget_result = self.ssh_manager.execute_on_remote(
+            wget_result = self.ssh_manager.execute(
                 ip=self.ip,
                 cmd=wget_cmd
             )
@@ -278,7 +278,7 @@ class CustomRepo(object):
                                           regenerate_script,
                                           local_mirror_path,
                                           self.ubuntu_release)
-            script_result = self.ssh_manager.execute_on_remote(
+            script_result = self.ssh_manager.execute(
                 ip=self.ip,
                 cmd=script_cmd
             )
@@ -316,7 +316,7 @@ class CustomRepo(object):
         cmd = ('fgrep -h -e " Depends: " -e "{0}" -e "{1}" '
                '/var/log/docker-logs/remote/node-*/'
                'puppet*.log'.format(err_start, err_end))
-        result = self.ssh_manager.execute_on_remote(
+        result = self.ssh_manager.execute(
             ip=self.ip,
             cmd=cmd
         )['stdout']
@@ -356,7 +356,7 @@ class CustomRepo(object):
 
         cmd = ('fgrep -h -e "Error: Package: " -e " Requires: " /var/log/'
                'docker-logs/remote/node-*/puppet*.log')
-        result = self.ssh_manager.execute_on_remote(
+        result = self.ssh_manager.execute(
             ip=self.ip,
             cmd=cmd
         )['stdout']

@@ -15,6 +15,7 @@
 import os
 import posixpath
 import re
+import json
 
 from paramiko import RSAKey
 from devops.models.node import SSHClient
@@ -104,10 +105,112 @@ class SSHManager(object):
         logger.debug('SSH_MANAGER: Connections {0}'.format(self.connections))
         return self._connect(self.connections[(ip, port)])
 
-    def execute_on_remote(self, ip, cmd, port=22):
+    def update_connection(self, ip, login=None, password=None,
+                          keys=None, port=22):
+        """Update existed connection
+
+        :param ip: host ip string
+        :param login: login string
+        :param password: password string
+        :param keys: list of keys
+        :param port: ssh port int
+        :return: None
+        """
+        if (ip, port) in self.connections:
+            logger.info('SSH_MANAGER:Close connection for {ip}:{port}'.format(
+                ip=ip, port=port))
+            self.connections[(ip, port)].clear()
+            logger.info('SSH_MANAGER:Create new connection for '
+                        '{ip}:{port}'.format(ip=ip, port=port))
+
+        self.connections[(ip, port)] = SSHClient(
+            host=ip,
+            port=port,
+            username=login,
+            password=password,
+            private_keys=keys if keys is not None else []
+        )
+
+    def execute(self, ip, cmd, port=22):
         remote = self._get_remote(ip=ip, port=port)
         return remote.execute(cmd)
+
+    def check_call(self, ip, cmd, port=22, verbose=False):
+        remote = self._get_remote(ip=ip, port=port)
+        return remote.check_call(cmd, verbose)
+
+    def execute_on_remote(self, ip, cmd, port=22, err_msg=None,
+                          jsonify=False, assert_ec_equal=None,
+                          raise_on_assert=True):
+        """Execute ``cmd`` on ``remote`` and return result.
+
+        :param ip: ip of host
+        :param port: ssh port
+        :param cmd: command to execute on remote host
+        :param err_msg: custom error message
+        :param assert_ec_equal: list of expected exit_code
+        :param raise_on_assert: Boolean
+        :return: dict
+        :raise: Exception
+        """
+        if assert_ec_equal is None:
+            assert_ec_equal = [0]
+        result = self.execute(ip=ip, port=port, cmd=cmd)
+        if result['exit_code'] not in assert_ec_equal:
+            error_details = {
+                'command': cmd,
+                'host': ip,
+                'stdout': result['stdout'],
+                'stderr': result['stderr'],
+                'exit_code': result['exit_code']}
+
+            error_msg = (err_msg or "Unexpected exit_code returned:"
+                                    " actual {0}, expected {1}."
+                         .format(error_details['exit_code'],
+                                 ' '.join(map(str, assert_ec_equal))))
+            log_msg = ("{0} Command: '{1}' "
+                       "Details: {2}".format(error_msg, cmd, error_details))
+            logger.error(log_msg)
+            if raise_on_assert:
+                raise Exception(log_msg)
+
+        result['stdout_str'] = ''.join(result['stdout'])
+        result['stdout_len'] = len(result['stdout'])
+        result['stderr_str'] = ''.join(result['stderr'])
+        result['stderr_len'] = len(result['stderr'])
+
+        if jsonify:
+            try:
+                result['stdout_json'] = \
+                    self._json_deserialize(result['stdout_str'])
+            except Exception:
+                error_msg = (
+                    "Unable to deserialize output of command"
+                    " '{0}' on host {1}".format(cmd, ip))
+                logger.error(error_msg)
+                raise Exception(error_msg)
+
+        return result
+
+    def _json_deserialize(self, json_string):
+        """ Deserialize json_string and return object
+
+        :param json_string: string or list with json
+        :return: obj
+        :raise: Exception
+        """
+        if isinstance(json_string, list):
+            json_string = ''.join(json_string)
+
+        try:
+            obj = json.loads(json_string)
+        except Exception:
+            log_msg = "Unable to deserialize"
+            logger.error("{0}. Actual string:\n{1}".format(log_msg,
+                                                           json_string))
+            raise Exception(log_msg)
+        return obj
+
     def open_on_remote(self, ip, path, mode='r', port=22):
         remote = self._get_remote(ip=ip, port=port)
         return remote.open(path, mode)

@@ -311,7 +311,7 @@ def install_pkg_2(ip, pkg_name, port=22):
     :return: exit code of installation
     """
     ssh_manager = SSHManager()
-    remote_status = ssh_manager.execute_on_remote(
+    remote_status = ssh_manager.execute(
         ip=ip,
         port=port,
         cmd="rpm -q '{0}'".format(pkg_name)
@@ -320,7 +320,7 @@ def install_pkg_2(ip, pkg_name, port=22):
         logger.info("Package '{0}' already installed.".format(pkg_name))
     else:
         logger.info("Installing package '{0}' ...".format(pkg_name))
-        remote_status = ssh_manager.execute_on_remote(
+        remote_status = ssh_manager.execute(
             ip=ip,
             port=port,
             cmd="yum -y install {0}".format(pkg_name)

@@ -38,7 +38,6 @@ from fuelweb_test.helpers.fuel_actions import PostgresActions
 from fuelweb_test.helpers.fuel_actions import NessusActions
 from fuelweb_test.helpers.ntp import GroupNtpSync
 from fuelweb_test.helpers.ssh_manager import SSHManager
-from fuelweb_test.helpers.utils import run_on_remote
 from fuelweb_test.helpers.utils import TimeStat
 from fuelweb_test.helpers import multiple_networks_hacks
 from fuelweb_test.models.fuel_web_client import FuelWebClient
@@ -372,35 +371,46 @@ class EnvironmentModel(object):
         return True
 
     def set_admin_ssh_password(self):
+        new_login = settings.SSH_CREDENTIALS['login']
+        new_password = settings.SSH_CREDENTIALS['password']
         try:
-            with self.d_env.get_admin_remote(
-                    login=settings.SSH_CREDENTIALS['login'],
-                    password=settings.SSH_CREDENTIALS['password']) as remote:
-                self.execute_remote_cmd(remote, 'date')
+            self.ssh_manager.execute_on_remote(
+                ip=self.ssh_manager.admin_ip,
+                cmd='date'
+            )
             logger.debug('Accessing admin node using SSH: SUCCESS')
         except Exception:
             logger.debug('Accessing admin node using SSH credentials:'
                          ' FAIL, trying to change password from default')
-            with self.d_env.get_admin_remote(
-                    login='root', password='r00tme') as remote:
-                self.execute_remote_cmd(
-                    remote, 'echo -e "{1}\\n{1}" | passwd {0}'
-                    .format(settings.SSH_CREDENTIALS['login'],
-                            settings.SSH_CREDENTIALS['password']))
+            self.ssh_manager.update_connection(
+                ip=self.ssh_manager.admin_ip,
+                login='root',
+                password='r00tme'
+            )
+            self.ssh_manager.execute_on_remote(
+                ip=self.ssh_manager.admin_ip,
+                cmd='echo -e "{1}\\n{1}" | passwd {0}'.format(new_login,
+                                                              new_password)
+            )
+            self.ssh_manager.update_connection(
+                ip=self.ssh_manager.admin_ip,
+                login=new_login,
+                password=new_password
+            )
             logger.debug("Admin node password has changed.")
             logger.info("Admin node login name: '{0}' , password: '{1}'".
-                        format(settings.SSH_CREDENTIALS['login'],
-                               settings.SSH_CREDENTIALS['password']))
+                        format(new_login, new_password))
 
     def set_admin_keystone_password(self):
         try:
             self.fuel_web.client.get_releases()
             # TODO(akostrikov) CENTOS7 except exceptions.Unauthorized:
         except:
-            with self.d_env.get_admin_remote() as remote:
-                self.execute_remote_cmd(
-                    remote, 'fuel user --newpass {0} --change-password'
-                    .format(settings.KEYSTONE_CREDS['password']))
+            self.ssh_manager.execute_on_remote(
+                ip=self.ssh_manager.admin_ip,
+                cmd='fuel user --newpass {0} --change-password'.format(
+                    settings.KEYSTONE_CREDS['password'])
+            )
             logger.info(
                 'New Fuel UI (keystone) username: "{0}", password: "{1}"'
                 .format(settings.KEYSTONE_CREDS['username'],
@@ -461,8 +471,11 @@ class EnvironmentModel(object):
                    "temporary-{0}\nbaseurl={1}/"
                    "\ngpgcheck=0\npriority="
                    "1' > {2}").format(i, url, conf_file)
-            with self.d_env.get_admin_remote() as remote:
-                remote.execute(cmd)
+            self.ssh_manager.execute(
+                ip=self.ssh_manager.admin_ip,
+                cmd=cmd
+            )
         self.admin_install_updates()
         if settings.MULTIPLE_NETWORKS:
             self.describe_second_admin_interface()
@@ -479,11 +492,13 @@ class EnvironmentModel(object):
                 settings.FUEL_STATS_HOST, settings.FUEL_STATS_PORT
             ))
         if settings.PATCHING_DISABLE_UPDATES:
-            with self.d_env.get_admin_remote() as remote:
             cmd = "find /etc/yum.repos.d/ -type f -regextype posix-egrep" \
                   " -regex '.*/mos[0-9,\.]+\-(updates|security).repo' | " \
                   "xargs -n1 -i sed '$aenabled=0' -i {}"
-                self.execute_remote_cmd(remote, cmd)
+            self.ssh_manager.execute_on_remote(
+                ip=self.ssh_manager.admin_ip,
+                cmd=cmd
+            )
 
     @update_rpm_packages
     @upload_manifests
@@ -501,21 +516,30 @@ class EnvironmentModel(object):
     @logwrap
     def wait_for_external_config(self, timeout=120):
         check_cmd = 'pkill -0 -f wait_for_external_config'
-        with self.d_env.get_admin_remote() as remote:
         if MASTER_IS_CENTOS7:
-                remote.execute(check_cmd)
+            self.ssh_manager.execute(
+                ip=self.ssh_manager.admin_ip,
+                cmd=check_cmd
+            )
         else:
             wait(
-                lambda: remote.execute(check_cmd)['exit_code'] == 0,
-                timeout=timeout)
+                lambda: self.ssh_manager.execute(
+                    ip=self.ssh_manager.admin_ip,
+                    cmd=check_cmd)['exit_code'] == 0, timeout=timeout)
 
     @logwrap
     def kill_wait_for_external_config(self):
         kill_cmd = 'pkill -f "^wait_for_external_config"'
         check_cmd = 'pkill -0 -f "^wait_for_external_config"; [[ $? -eq 1 ]]'
-        with self.d_env.get_admin_remote() as remote:
-            run_on_remote(remote, kill_cmd)
-            run_on_remote(remote, check_cmd)
+        self.ssh_manager.execute_on_remote(
+            ip=self.ssh_manager.admin_ip,
+            cmd=kill_cmd
+        )
+        self.ssh_manager.execute_on_remote(
+            ip=self.ssh_manager.admin_ip,
+            cmd=check_cmd
+        )
 
     @retry(count=3, delay=60)
     def sync_time(self, nailgun_nodes=None):
@@ -573,8 +597,11 @@ class EnvironmentModel(object):
                   "--ifaces {iface} " \
                   "--repeat 3 " \
                   "--timeout 10".format(iface=iface)
-        with self.d_env.get_admin_remote() as admin_remote:
-            out = admin_remote.execute(command)['stdout']
+        out = self.ssh_manager.execute(
+            ip=self.ssh_manager.admin_ip,
+            cmd=command
+        )['stdout']
 
         assert_true(self.get_admin_node_ip() in "".join(out),
                     "dhcpcheck doesn't discover master ip")
@@ -585,9 +612,11 @@ class EnvironmentModel(object):
             logger.warning('Default image for bootstrap '
                            'is not based on Ubuntu!')
             return
-        with self.d_env.get_admin_remote() as admin_remote:
+        bootstrap_images = self.ssh_manager.execute_on_remote(
+            ip=self.ssh_manager.admin_ip,
             cmd='fuel-bootstrap --quiet list'
-            bootstrap_images = run_on_remote(admin_remote, cmd)
+        )['stdout']
         assert_true(any('active' in line for line in bootstrap_images),
                     'Ubuntu bootstrap image wasn\'t built and activated! '
                     'See logs in /var/log/fuel-bootstrap-image-build.log '
@@ -595,15 +624,18 @@ class EnvironmentModel(object):
 
     def admin_install_pkg(self, pkg_name):
         """Install a package <pkg_name> on the admin node"""
-        with self.d_env.get_admin_remote() as remote:
-            remote_status = remote.execute("rpm -q {0}'".format(pkg_name))
+        remote_status = self.ssh_manager.execute(
+            ip=self.ssh_manager.admin_ip,
+            cmd="rpm -q {0}'".format(pkg_name)
+        )
         if remote_status['exit_code'] == 0:
-            logger.info("Package '{0}' already installed."
-                        .format(pkg_name))
+            logger.info("Package '{0}' already installed.".format(pkg_name))
         else:
             logger.info("Installing package '{0}' ...".format(pkg_name))
-            remote_status = remote.execute("yum -y install {0}"
-                                           .format(pkg_name))
+            remote_status = self.ssh_manager.execute(
+                ip=self.ssh_manager.admin_ip,
+                cmd="yum -y install {0}".format(pkg_name)
+            )
         logger.info("Installation of the package '{0}' has been"
                     " completed with exit code {1}"
                     .format(pkg_name, remote_status['exit_code']))
@@ -611,10 +643,15 @@ class EnvironmentModel(object):
 
     def admin_run_service(self, service_name):
         """Start a service <service_name> on the admin node"""
-        with self.d_env.get_admin_remote() as admin_remote:
-            admin_remote.execute("service {0} start".format(service_name))
-            remote_status = admin_remote.execute("service {0} status"
-                                                 .format(service_name))
+        self.ssh_manager.execute(
+            ip=self.ssh_manager.admin_ip,
+            cmd="service {0} start".format(service_name)
+        )
+        remote_status = self.ssh_manager.execute(
+            ip=self.ssh_manager.admin_ip,
+            cmd="service {0} status".format(service_name)
+        )
         if any('running...' in status for status in remote_status['stdout']):
             logger.info("Service '{0}' is running".format(service_name))
         else:
@@ -630,8 +667,12 @@ class EnvironmentModel(object):
     def admin_install_updates(self):
         logger.info('Searching for updates..')
         update_command = 'yum clean expire-cache; yum update -y'
-        with self.d_env.get_admin_remote() as admin_remote:
-            update_result = admin_remote.execute(update_command)
+        update_result = self.ssh_manager.execute(
+            ip=self.ssh_manager.admin_ip,
+            cmd=update_command
+        )
+
         logger.info('Result of "{1}" command on master node: '
                     '{0}'.format(update_result, update_command))
         assert_equal(int(update_result['exit_code']), 0,
@@ -655,8 +696,11 @@ class EnvironmentModel(object):
         logger.info('{0} packet(s) were updated'.format(updates_count))
 
         cmd = 'dockerctl destroy all; bootstrap_admin_node.sh;'
-        with self.d_env.get_admin_remote() as admin_remote:
-            result = admin_remote.execute(cmd)
+        result = self.ssh_manager.execute(
+            ip=self.ssh_manager.admin_ip,
+            cmd=cmd
+        )
         logger.info('Result of "{1}" command on master node: '
                     '{0}'.format(result, cmd))
         assert_equal(int(result['exit_code']), 0,
@@ -670,23 +714,27 @@ class EnvironmentModel(object):
     def modify_resolv_conf(self, nameservers=None, merge=True):
         if nameservers is None:
             nameservers = []
-        with self.d_env.get_admin_remote() as remote:
-            resolv_conf = remote.execute('cat /etc/resolv.conf')
-            assert_equal(0, resolv_conf['exit_code'], 'Executing "{0}" on the '
-                         'admin node has failed with: {1}'
-                         .format('cat /etc/resolv.conf',
-                                 resolv_conf['stderr']))
+        resolv_conf = self.ssh_manager.execute(
+            ip=self.ssh_manager.admin_ip,
+            cmd='cat /etc/resolv.conf'
+        )
+        assert_equal(0, resolv_conf['exit_code'],
+                     'Executing "{0}" on the admin node has failed with: {1}'
+                     .format('cat /etc/resolv.conf', resolv_conf['stderr']))
         if merge:
             nameservers.extend(resolv_conf['stdout'])
 
         resolv_keys = ['search', 'domain', 'nameserver']
         resolv_new = "".join('{0}\n'.format(ns) for ns in nameservers
                              if any(x in ns for x in resolv_keys))
         logger.debug('echo "{0}" > /etc/resolv.conf'.format(resolv_new))
         echo_cmd = 'echo "{0}" > /etc/resolv.conf'.format(resolv_new)
-        echo_result = remote.execute(echo_cmd)
-        assert_equal(0, echo_result['exit_code'], 'Executing "{0}" on the '
-                     'admin node has failed with: {1}'
+        echo_result = self.ssh_manager.execute(
+            ip=self.ssh_manager.admin_ip,
+            cmd=echo_cmd
+        )
+        assert_equal(0, echo_result['exit_code'],
+                     'Executing "{0}" on the admin node has failed with: {1}'
                      .format(echo_cmd, echo_result['stderr']))
         return resolv_conf['stdout']
 
@@ -727,17 +775,27 @@ class EnvironmentModel(object):
             add_second_admin_ip, second_admin_if, second_admin_ip)
         logger.debug('Trying to assign {0} IP to the {1} on master node...'.
                      format(second_admin_ip, second_admin_if))
-        with self.d_env.get_admin_remote() as remote:
-            result = remote.execute(cmd)
+        result = self.ssh_manager.execute(
+            ip=self.ssh_manager.admin_ip,
+            cmd=cmd
+        )
         assert_equal(result['exit_code'], 0, ('Failed to assign second admin '
                      'IP address on master node: {0}').format(result))
         logger.debug('Done: {0}'.format(result['stdout']))
-        with self.d_env.get_admin_remote() as remote:
+        # TODO for ssh manager
         multiple_networks_hacks.configure_second_admin_dhcp(
-                remote, second_admin_if)
+            self.ssh_manager.admin_ip,
+            second_admin_if
+        )
         multiple_networks_hacks.configure_second_admin_firewall(
-                remote, second_admin_network, second_admin_netmask,
-                second_admin_if, self.get_admin_node_ip())
+            self.ssh_manager.admin_ip,
+            second_admin_network,
+            second_admin_netmask,
+            second_admin_if,
+            self.get_admin_node_ip()
+        )
 
     @logwrap
     def get_masternode_uuid(self):

@@ -269,15 +269,14 @@ class CommandLineTest(test_cli_base.CommandLine):
             'Some slaves do not become online after revert!!'
             ' Expected {0} Actual {1}'.format(nodes, online_nodes))
 
-        res = self.ssh_manager.execute_on_remote(
+        self.ssh_manager.execute_on_remote(
            ip=self.ssh_manager.admin_ip,
            cmd='fuel --env {0} env delete'.format(cluster_id)
        )
-        assert_true(res['exit_code'] == 0)
 
        try:
            wait(lambda:
-                 self.ssh_manager.execute_on_remote(
+                 self.ssh_manager.execute(
                     ip=self.ssh_manager.admin_ip,
                     cmd="fuel env | awk '{print $1}' | tail -n 1 | "
                         "grep '^.$'"