Stop running test_schedule_to_all_nodes in the multinode job
After the recent changes we're running 5 tests already, some of them
using several VMs. This should cover scheduling to different conductors
well enough, the nova test just adds random failures on top.
This allows reducing the number of test VMs to 3 per testing node
(6 in total), reducing the resource pressure and making it possible
to give each VM a bit more RAM.
Also adding missing VM_SPECS_DISK to the subnode configuration.
Conflicts:
zuul.d/ironic-jobs.yaml
Change-Id: Idde2891b2f15190f327e4298131a6069c58163c0
(cherry picked from commit 1cb1df76d9)
This commit is contained in:
parent
c3b6ff60da
commit
1db4f007e7
|
@ -414,7 +414,7 @@
|
||||||
vars:
|
vars:
|
||||||
tox_envlist: all
|
tox_envlist: all
|
||||||
tempest_concurrency: 3
|
tempest_concurrency: 3
|
||||||
tempest_test_regex: "(ironic_tempest_plugin.tests.scenario|test_schedule_to_all_nodes)"
|
tempest_test_regex: "ironic_tempest_plugin.tests.scenario"
|
||||||
tempest_test_timeout: 2400
|
tempest_test_timeout: 2400
|
||||||
devstack_localrc:
|
devstack_localrc:
|
||||||
BUILD_TIMEOUT: 2400
|
BUILD_TIMEOUT: 2400
|
||||||
|
@ -444,10 +444,10 @@
|
||||||
IRONIC_TEMPEST_BUILD_TIMEOUT: 600
|
IRONIC_TEMPEST_BUILD_TIMEOUT: 600
|
||||||
IRONIC_TEMPEST_WHOLE_DISK_IMAGE: True
|
IRONIC_TEMPEST_WHOLE_DISK_IMAGE: True
|
||||||
IRONIC_USE_LINK_LOCAL: True
|
IRONIC_USE_LINK_LOCAL: True
|
||||||
IRONIC_VM_COUNT: 6
|
IRONIC_VM_COUNT: 3
|
||||||
IRONIC_VM_EPHEMERAL_DISK: 0
|
IRONIC_VM_EPHEMERAL_DISK: 0
|
||||||
IRONIC_VM_LOG_DIR: '{{ devstack_base_dir }}/ironic-bm-logs'
|
IRONIC_VM_LOG_DIR: '{{ devstack_base_dir }}/ironic-bm-logs'
|
||||||
IRONIC_VM_SPECS_RAM: 384
|
IRONIC_VM_SPECS_RAM: 512
|
||||||
IRONIC_VM_SPECS_DISK: 4
|
IRONIC_VM_SPECS_DISK: 4
|
||||||
OVS_BRIDGE_MAPPINGS: 'mynetwork:brbm,public:br-infra'
|
OVS_BRIDGE_MAPPINGS: 'mynetwork:brbm,public:br-infra'
|
||||||
OVS_PHYSICAL_BRIDGE: brbm
|
OVS_PHYSICAL_BRIDGE: brbm
|
||||||
|
@ -521,11 +521,12 @@
|
||||||
IRONIC_PROVISION_NETWORK_NAME: ironic-provision
|
IRONIC_PROVISION_NETWORK_NAME: ironic-provision
|
||||||
IRONIC_RAMDISK_TYPE: tinyipa
|
IRONIC_RAMDISK_TYPE: tinyipa
|
||||||
IRONIC_USE_LINK_LOCAL: True
|
IRONIC_USE_LINK_LOCAL: True
|
||||||
IRONIC_VM_COUNT: 6
|
IRONIC_VM_COUNT: 3
|
||||||
IRONIC_VM_EPHEMERAL_DISK: 0
|
IRONIC_VM_EPHEMERAL_DISK: 0
|
||||||
IRONIC_VM_LOG_DIR: '{{ devstack_base_dir }}/ironic-bm-logs'
|
IRONIC_VM_LOG_DIR: '{{ devstack_base_dir }}/ironic-bm-logs'
|
||||||
IRONIC_VM_NETWORK_BRIDGE: sub1brbm
|
IRONIC_VM_NETWORK_BRIDGE: sub1brbm
|
||||||
IRONIC_VM_SPECS_RAM: 384
|
IRONIC_VM_SPECS_RAM: 512
|
||||||
|
IRONIC_VM_SPECS_DISK: 4
|
||||||
OVS_BRIDGE_MAPPINGS: 'mynetwork:sub1brbm,public:br-infra'
|
OVS_BRIDGE_MAPPINGS: 'mynetwork:sub1brbm,public:br-infra'
|
||||||
OVS_PHYSICAL_BRIDGE: sub1brbm
|
OVS_PHYSICAL_BRIDGE: sub1brbm
|
||||||
PHYSICAL_NETWORK: mynetwork
|
PHYSICAL_NETWORK: mynetwork
|
||||||
|
|
Loading…
Reference in New Issue