Add config parameters for WinRM timeouts

Both the operation timeout and the read timeout can now be configured in the
zuul-executor main configuration file. If the network is flaky, increasing
these values from their defaults may help lower the rate of aborted Windows
builds.

Change-Id: I4c25ca6027fc4150ec1c9c49ed286e7b4f20d4dd
This commit is contained in:
parent
33699fa316
commit
cbb0082451
|
@ -522,6 +522,17 @@ The following sections of ``zuul.conf`` are used by the executor:
|
||||||
.. note:: Currently certificate verification is disabled when
   connecting to Windows nodes via winrm.

.. attr:: winrm_operation_timeout_sec

   :default: None. The Ansible default of 20 is used in this case.

   The timeout for WinRM operations.

.. attr:: winrm_read_timeout_sec

   :default: None. The Ansible default of 30 is used in this case.

   The timeout for WinRM read. Increase this if there are intermittent
   network issues and read timeout errors keep occurring.
.. _admin_sitewide_variables:

.. attr:: variables
|
@ -0,0 +1,33 @@
|
||||||
|
[gearman]
server=127.0.0.1

[statsd]
# note, use 127.0.0.1 rather than localhost to avoid getting ipv6
# see: https://github.com/jsocol/pystatsd/issues/61
server=127.0.0.1

[scheduler]
tenant_config=main.yaml

[merger]
git_dir=/tmp/zuul-test/merger-git
git_user_email=zuul@example.com
git_user_name=zuul

[executor]
git_dir=/tmp/zuul-test/executor-git
winrm_operation_timeout_sec=120
winrm_read_timeout_sec=180

[connection gerrit]
driver=gerrit
server=review.example.com
user=jenkins
sshkey=fake_id_rsa_path

[connection smtp]
driver=smtp
server=localhost
port=25
default_from=zuul@example.com
default_to=you@example.com
|
|
@ -19,12 +19,12 @@ import yaml
|
||||||
from tests.base import ZuulTestCase
|
from tests.base import ZuulTestCase
|
||||||
|
|
||||||
|
|
||||||
class TestInventory(ZuulTestCase):
|
class TestInventoryBase(ZuulTestCase):
|
||||||
|
|
||||||
tenant_config_file = 'config/inventory/main.yaml'
|
tenant_config_file = 'config/inventory/main.yaml'
|
||||||
|
|
||||||
def setUp(self):
|
def setUp(self):
|
||||||
super(TestInventory, self).setUp()
|
super(TestInventoryBase, self).setUp()
|
||||||
self.executor_server.hold_jobs_in_build = True
|
self.executor_server.hold_jobs_in_build = True
|
||||||
A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
|
A = self.fake_gerrit.addFakeChange('org/project', 'master', 'A')
|
||||||
self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
|
self.fake_gerrit.addEvent(A.getPatchsetCreatedEvent(1))
|
||||||
|
@ -41,6 +41,9 @@ class TestInventory(ZuulTestCase):
|
||||||
'setup-inventory.yaml')
|
'setup-inventory.yaml')
|
||||||
return yaml.safe_load(open(setup_inv_path, 'r'))
|
return yaml.safe_load(open(setup_inv_path, 'r'))
|
||||||
|
|
||||||
|
|
||||||
|
class TestInventory(TestInventoryBase):
|
||||||
|
|
||||||
def test_single_inventory(self):
|
def test_single_inventory(self):
|
||||||
|
|
||||||
inventory = self._get_build_inventory('single-inventory')
|
inventory = self._get_build_inventory('single-inventory')
|
||||||
|
@ -157,3 +160,21 @@ class TestInventory(ZuulTestCase):
|
||||||
|
|
||||||
self.executor_server.release()
|
self.executor_server.release()
|
||||||
self.waitUntilSettled()
|
self.waitUntilSettled()
|
||||||
|
|
||||||
|
|
||||||
|
class TestWindowsInventory(TestInventoryBase):
    """Check that executor-level WinRM timeout settings reach the inventory.

    Uses a dedicated executor config (zuul-winrm.conf) that sets
    winrm_operation_timeout_sec and winrm_read_timeout_sec.
    """

    config_file = 'zuul-winrm.conf'

    def test_windows_inventory(self):
        """The windows host gets winrm connection vars from the config file."""
        build_inventory = self._get_build_inventory('hostvars-inventory')
        host_vars = build_inventory['all']['hosts']['windows']

        # Connection type plus both configured timeouts must be present
        # (values arrive as strings, exactly as read from the ini file).
        expected_vars = {
            'ansible_connection': 'winrm',
            'ansible_winrm_operation_timeout_sec': '120',
            'ansible_winrm_read_timeout_sec': '180',
        }
        for var_name, expected in expected_vars.items():
            self.assertEqual(host_vars[var_name], expected)

        self.executor_server.release()
        self.waitUntilSettled()
|
|
|
@ -661,6 +661,14 @@ class AnsibleJob(object):
|
||||||
self.winrm_pem_file = get_default(self.executor_server.config,
|
self.winrm_pem_file = get_default(self.executor_server.config,
|
||||||
'executor', 'winrm_cert_pem_file',
|
'executor', 'winrm_cert_pem_file',
|
||||||
'~/.winrm/winrm_client_cert.pem')
|
'~/.winrm/winrm_client_cert.pem')
|
||||||
|
self.winrm_operation_timeout = get_default(
|
||||||
|
self.executor_server.config,
|
||||||
|
'executor',
|
||||||
|
'winrm_operation_timeout_sec')
|
||||||
|
self.winrm_read_timeout = get_default(
|
||||||
|
self.executor_server.config,
|
||||||
|
'executor',
|
||||||
|
'winrm_read_timeout_sec')
|
||||||
self.ssh_agent = SshAgent()
|
self.ssh_agent = SshAgent()
|
||||||
|
|
||||||
self.executor_variables_file = None
|
self.executor_variables_file = None
|
||||||
|
@ -1243,6 +1251,12 @@ class AnsibleJob(object):
|
||||||
# now.
|
# now.
|
||||||
host_vars['ansible_winrm_server_cert_validation'] = \
|
host_vars['ansible_winrm_server_cert_validation'] = \
|
||||||
'ignore'
|
'ignore'
|
||||||
|
if self.winrm_operation_timeout is not None:
|
||||||
|
host_vars['ansible_winrm_operation_timeout_sec'] =\
|
||||||
|
self.winrm_operation_timeout
|
||||||
|
if self.winrm_read_timeout is not None:
|
||||||
|
host_vars['ansible_winrm_read_timeout_sec'] = \
|
||||||
|
self.winrm_read_timeout
|
||||||
|
|
||||||
host_keys = []
|
host_keys = []
|
||||||
for key in node.get('host_keys'):
|
for key in node.get('host_keys'):
|
||||||
|
|
Loading…
Reference in New Issue