
Merge "Combine zuul.artifacts lists in zuul_return"

Branch: changes/38/638038/1
Zuul, 3 years ago, committed by Gerrit Code Review
Commit: 6bc25035dd

3 changed files:
  doc/source/user/jobs.rst (4 changes)
  tests/fixtures/config/sql-driver/git/common-config/playbooks/project-test1.yaml (9 changes)
  zuul/ansible/actiongeneral/zuul_return.py (21 changes)

4
doc/source/user/jobs.rst

@@ -807,6 +807,10 @@ the **zuul.log_url** value if set to create an absolute URL. The
 **metadata** key is optional; if it is provided, it must be a
 dictionary; its keys and values may be anything.
 
+If *zuul_return* is invoked multiple times (e.g., via multiple
+playbooks), then the elements of **zuul.artifacts** from each
+invocation will be appended.
+
 Skipping child jobs
 ~~~~~~~~~~~~~~~~~~~
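
A minimal illustrative sketch of the documented behaviour (not part of this change): the artifact names and URLs below are placeholders, and the point is only that both lists survive instead of a later invocation overwriting an earlier one.

# Hypothetical return data from two separate zuul_return invocations,
# e.g. one per playbook.
first_call = {'zuul': {'artifacts': [
    {'name': 'tarball', 'url': 'http://example.com/tarball'},
]}}
second_call = {'zuul': {'artifacts': [
    {'name': 'docs', 'url': 'http://example.com/docs'},
]}}
# With this change, the job's reported zuul.artifacts list contains
# both entries rather than only the list from the last invocation.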

tests/fixtures/config/sql-driver/git/common-config/playbooks/project-test1.yaml (9 changes)

@@ -1,5 +1,12 @@
 - hosts: all
   tasks:
+    - name: Return artifact data
+      zuul_return:
+        data:
+          zuul:
+            artifacts:
+              - name: relative
+                url: relative/docs
     - name: Return artifact data
       zuul_return:
         data:
@@ -11,5 +18,3 @@
                 url: http://example.com/tarball
               - name: docs
                 url: http://example.com/docs
-              - name: relative
-                url: relative/docs

zuul/ansible/actiongeneral/zuul_return.py (21 changes)

@@ -40,6 +40,23 @@ def merge_dict(dict_a, dict_b):
     return dict_b
 
 
+def merge_data(dict_a, dict_b):
+    """
+    Merge dict_a into dict_b, handling any special cases for zuul variables
+    """
+    artifacts_a = dict_a.get('zuul', {}).get('artifacts', [])
+    if not isinstance(artifacts_a, list):
+        artifacts_a = []
+    artifacts_b = dict_b.get('zuul', {}).get('artifacts', [])
+    if not isinstance(artifacts_b, list):
+        artifacts_b = []
+    artifacts = artifacts_a + artifacts_b
+    merge_dict(dict_a, dict_b)
+    if artifacts:
+        dict_b.setdefault('zuul', {})['artifacts'] = artifacts
+    return dict_b
+
+
 def set_value(path, new_data, new_file):
     workdir = os.path.dirname(path)
     data = None
@@ -53,9 +70,9 @@ def set_value(path, new_data, new_file):
     if new_file:
         with open(new_file, 'r') as f:
-            merge_dict(json.load(f), data)
+            merge_data(json.load(f), data)
     if new_data:
-        merge_dict(new_data, data)
+        merge_data(new_data, data)
 
     (f, tmp_path) = tempfile.mkstemp(dir=workdir)
     try:
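
A rough, runnable sketch of how the new merge_data helper behaves. The simplified merge_dict below is an assumption standing in for the existing helper in zuul_return.py (only its final return is visible in this diff), and the sample payloads are made up.

# merge_dict here is a simplified stand-in: a plain recursive merge in
# which values from dict_a overwrite non-dict values in dict_b.
def merge_dict(dict_a, dict_b):
    for key, value in dict_a.items():
        if isinstance(value, dict) and isinstance(dict_b.get(key), dict):
            merge_dict(value, dict_b[key])
        else:
            dict_b[key] = value
    return dict_b


# merge_data as added by this change: collect both artifact lists before
# the ordinary dict merge, then put the combined list back.
def merge_data(dict_a, dict_b):
    artifacts_a = dict_a.get('zuul', {}).get('artifacts', [])
    if not isinstance(artifacts_a, list):
        artifacts_a = []
    artifacts_b = dict_b.get('zuul', {}).get('artifacts', [])
    if not isinstance(artifacts_b, list):
        artifacts_b = []
    artifacts = artifacts_a + artifacts_b
    merge_dict(dict_a, dict_b)
    if artifacts:
        dict_b.setdefault('zuul', {})['artifacts'] = artifacts
    return dict_b


# Example payloads: 'accumulated' plays the role of the data already read
# from the results file, 'new_data' the payload of a later zuul_return call.
accumulated = {'zuul': {'artifacts': [{'name': 'docs', 'url': 'http://example.com/docs'}]}}
new_data = {'zuul': {'artifacts': [{'name': 'relative', 'url': 'relative/docs'}]}}

merge_data(new_data, accumulated)
# accumulated['zuul']['artifacts'] now holds both entries; a plain
# merge_dict() call would have replaced the list outright.
print(accumulated['zuul']['artifacts'])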
