Additional resource cleanup

These resources were previously not properly closed, leading to
ResourceWarnings being printed to the end user.

Change-Id: I27e3f209f948b4c7c04e7994fc4dc7489f28f720
Related-Bug: #1837393
Alex Schultz 2019-08-02 10:09:31 -06:00
parent 6521585912
commit 6e13179ecd
7 changed files with 44 additions and 12 deletions
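Every file below gets the same treatment: a file handle or socket that was opened and left for the garbage collector is now closed explicitly or wrapped in a with-block. As a minimal sketch of why that matters (not part of this commit; the helper names and the path argument are made up), assuming Python 3, where an unclosed file emits a ResourceWarning when the object is finalized (ignored by the default warning filters unless warnings are enabled):

import warnings

# Make ResourceWarning visible; the default filters ignore it.
warnings.simplefilter('always', ResourceWarning)

def read_key_leaky(path):
    # Old pattern: the file object is never closed, so CPython emits
    # "ResourceWarning: unclosed file ..." when the object is finalized.
    return open(path).read()

def read_key_closed(path):
    # New pattern: the context manager closes the handle as soon as the
    # block exits, so no warning is emitted.
    with open(path) as f:
        return f.read()

The socket change in the deployment workflow follows the same idea: keep a reference to the socket and call close() instead of connecting on a temporary object.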


@@ -51,6 +51,19 @@ class FakeHandle(object):
         return
 
 
+class FakeFile(FakeHandle):
+    def __init__(self, contents):
+        self.contents = contents
+
+    def read(self):
+        if not self.contents:
+            raise ValueError('I/O operation on closed file')
+        return self.contents
+
+    def close(self):
+        self.contents = None
+
+
 class FakeWebSocket(FakeHandle):
 
     def wait_for_messages(self, timeout=None):


@@ -77,6 +77,8 @@ class TestOvercloudRolesGenerateData(fakes.TestDeployOvercloud):
         generate_roles_mock.return_value = 'foo'
         capture_mock = mock.MagicMock()
         self.cmd._capture_output = capture_mock
+        stop_capture_mock = mock.MagicMock()
+        self.cmd._stop_capture_output = stop_capture_mock
 
         arglist = ['--roles-path', '/tmp', 'Controller', 'Compute']
         verifylist = [
@@ -93,6 +95,7 @@ class TestOvercloudRolesGenerateData(fakes.TestDeployOvercloud):
         generate_roles_mock.assert_called_once_with('/tmp',
                                                     ['Controller', 'Compute'],
                                                     True)
+        stop_capture_mock.assert_called_once_with(None)
 
     @mock.patch(
         'tripleo_common.utils.roles.generate_roles_data_from_directory')
@@ -106,6 +109,8 @@ class TestOvercloudRolesGenerateData(fakes.TestDeployOvercloud):
         generate_roles_mock.return_value = 'foo'
         capture_mock = mock.MagicMock()
         self.cmd._capture_output = capture_mock
+        stop_capture_mock = mock.MagicMock()
+        self.cmd._stop_capture_output = stop_capture_mock
 
         arglist = ['--roles-path', '/tmp', '-o', 'foo.yaml',
                    'Controller', 'Compute']
@@ -124,6 +129,7 @@ class TestOvercloudRolesGenerateData(fakes.TestDeployOvercloud):
         generate_roles_mock.assert_called_once_with('/tmp',
                                                     ['Controller', 'Compute'],
                                                     True)
+        stop_capture_mock.assert_called_once_with('foo.yaml')
 
     @mock.patch('tripleo_common.utils.roles.get_roles_list_from_directory')
     @mock.patch('os.path.realpath')


@@ -17,6 +17,7 @@ import mock
 from osc_lib.tests import utils
 
 from tripleoclient import exceptions
+from tripleoclient.tests.fakes import FakeFile
 from tripleoclient.workflows import deployment
@@ -60,9 +61,7 @@ class TestDeploymentWorkflows(utils.TestCommand):
         ssh_key = 'test-key'
         mock_tempfile.mkdtemp.return_value = '/foo'
-        mock_read = mock.Mock()
-        mock_read.read.return_value = 'key'
-        mock_open.return_value = mock_read
+        mock_open.return_value = FakeFile('key')
 
         mock_state = mock.Mock()
         mock_state.state = 'SUCCESS'
         self.workflow.executions.get.return_value = mock_state
@@ -102,9 +101,7 @@ class TestDeploymentWorkflows(utils.TestCommand):
         ssh_key = 'test-key'
         mock_tempfile.mkdtemp.return_value = '/foo'
-        mock_read = mock.Mock()
-        mock_read.read.return_value = 'key'
-        mock_open.return_value = mock_read
+        mock_open.side_effect = [FakeFile('pubkey'), FakeFile('privkey')]
 
         mock_state = mock.Mock()
         mock_state.state = 'ERROR'
         mock_state.to_dict.return_value = dict(state_info='an error')


@@ -436,7 +436,8 @@ class PrepareImageFiles(command.Command):
         modify_role = parsed_args.modify_role
         append_tag = time.strftime('-modified-%Y%m%d%H%M%S')
         if parsed_args.modify_vars:
-            modify_vars = yaml.safe_load(open(parsed_args.modify_vars).read())
+            with open(parsed_args.modify_vars) as m:
+                modify_vars = yaml.safe_load(m.read())
 
         prepare_data = kolla_builder.container_images_prepare(
             excludes=parsed_args.excludes,


@@ -99,6 +99,7 @@ class DownloadConfig(command.Command):
         self.log.debug("config-download tempurl: %s" % tempurl)
         f = request.urlopen(tempurl)
         tarball_contents = f.read()
+        f.close()
 
         tarball_name = "%s-config.tar.gz" % name
         tarball_path = os.path.join(config_dir, tarball_name)


@@ -64,6 +64,11 @@ class RolesGenerate(RolesBaseCommand):
         if filename is not None:
             sys.stdout = open(filename, 'w')
 
+    def _stop_capture_output(self, filename=None):
+        """Stop capturing stdout to a file if provided"""
+        if filename is not None:
+            sys.stdout.close()
+
     def take_action(self, parsed_args):
         """Generate roles_data.yaml from imputed roles
@@ -83,6 +88,7 @@
             roles_path, list(requested_roles.keys()),
             parsed_args.skip_validate)
         sys.stdout.write(roles_data)
+        self._stop_capture_output(parsed_args.output_file)
 
 
 class RoleList(RolesBaseCommand):


@@ -189,11 +189,15 @@ def wait_for_ssh_port(host):
                 "Timed out waiting for port 22 from %s" % host)
         # first check ipv4 then check ipv6
         try:
-            socket.socket().connect((host, 22))
+            sock = socket.socket()
+            sock.connect((host, 22))
+            sock.close()
             return
         except socket.error:
             try:
-                socket.socket(socket.AF_INET6).connect((host, 22))
+                sock = socket.socket(socket.AF_INET6)
+                sock.connect((host, 22))
+                sock.close()
                 return
             except socket.error:
                 pass
@@ -244,7 +248,10 @@ def enable_ssh_admin(log, clients, plan_name, hosts, ssh_user, ssh_key):
     tmp_key_command = ["ssh-keygen", "-N", "", "-t", "rsa", "-b", "4096",
                        "-f", tmp_key_private, "-C", tmp_key_comment]
     subprocess.check_call(tmp_key_command, stderr=subprocess.STDOUT)
-    tmp_key_public_contents = open(tmp_key_public).read()
+    with open(tmp_key_public) as pubkey:
+        tmp_key_public_contents = pubkey.read()
+    with open(tmp_key_private) as privkey:
+        tmp_key_private_contents = privkey.read()
 
     for host in hosts:
         wait_for_ssh_port(host)
@@ -265,7 +272,7 @@ def enable_ssh_admin(log, clients, plan_name, hosts, ssh_user, ssh_key):
     workflow_input = {
         "ssh_user": ssh_user,
         "ssh_servers": hosts,
-        "ssh_private_key": open(tmp_key_private).read(),
+        "ssh_private_key": tmp_key_private_contents,
         "plan_name": plan_name
     }
@@ -329,7 +336,8 @@ def config_download(log, clients, stack, templates,
     if output_dir:
         workflow_input.update(dict(work_dir=output_dir))
 
     if override_ansible_cfg:
-        override_ansible_cfg_contents = open(override_ansible_cfg).read()
+        with open(override_ansible_cfg) as cfg:
+            override_ansible_cfg_contents = cfg.read()
         workflow_input.update(
             dict(override_ansible_cfg=override_ansible_cfg_contents))