Improve the way we log to the file system

Improve the way we log to the file system and share the same UUID
between callback logging, the Ansible runner, and artifact logging.

This patch also:
* Adds the zuul.d directory with zuul.d/layout.yaml
* Adds the .gitreview file
* Fixes the ansible-runner requirements issue

Co-Authored-By: Gael Chamoulaud <gchamoul@redhat.com>
Change-Id: I4da8c1974362029754aa5b09cd8e6c7d6a27dd0d
Signed-off-by: Gael Chamoulaud (Strider) <gchamoul@redhat.com>
Author: Gael Chamoulaud (Strider)
Date:   2020-03-11 17:07:56 +01:00
Parent: 06892d8c65
Commit: 84aa2b6846
12 changed files with 176 additions and 111 deletions
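At a high level, every validation run now gets a single UUID that both names
its artifact directory and is exported to the logging callback, so callback
logs, runner artifacts and results can be correlated. A minimal sketch of the
intended flow, using the helpers introduced below (the playbook name is
illustrative):

    from validations_libs import utils as v_utils
    from validations_libs.ansible import Ansible

    # One UUID per run: it prefixes the artifact directory and is passed to
    # the runner, which exports it as ANSIBLE_UUID for the callback.
    validation_uuid, artifacts_dir = v_utils.create_artifacts_dir(
        prefix='check-ram.yaml')
    run_ansible = Ansible(validation_uuid)
    playbook, rc, status = run_ansible.run(
        workdir=artifacts_dir,
        playbook='check-ram.yaml',
        inventory='localhost,',
        output_callback='validation_json',
        quiet=True,
        ansible_artifact_path=artifacts_dir)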

.gitreview (new file)

@@ -0,0 +1,4 @@
+[gerrit]
+host=review.opendev.org
+port=29418
+project=openstack/validations-libs.git


@@ -1,6 +1,7 @@
 alabaster==0.7.10
 ansible-lint==3.4.21
 ansible==2.4.3.0
+ansible-runner==1.4.4
 anyjson==0.3.3
 appdirs==1.4.3
 asn1crypto==0.24.0


@@ -5,4 +5,4 @@
 pbr>=3.1.1 # Apache-2.0
 six>=1.11.0 # MIT
-ansible-runner
+ansible-runner>=1.4.4 # Apache-2.0


@@ -4,7 +4,8 @@
 openstackdocstheme>=1.20.0 # Apache-2.0
 hacking<0.12,>=0.11.0 # Apache-2.0
-mock
+mock>=2.0.0 # BSD
 coverage!=4.4,>=4.0 # Apache-2.0
 python-subunit>=1.0.0 # Apache-2.0/BSD
 sphinx>=1.8.0,<2.0.0;python_version=='2.7' # BSD
@@ -12,5 +13,4 @@ sphinx>=1.8.0,!=2.1.0;python_version>='3.4' # BSD
 testrepository>=0.0.18 # Apache-2.0/BSD
 testscenarios>=0.4 # Apache-2.0/BSD
 testtools>=2.2.0 # MIT
-reno>=2.5.0 # Apache-2.0
 pre-commit # MIT


@@ -20,12 +20,10 @@ import os
 import six
 import sys
 import tempfile
-import uuid
 import yaml

 from six.moves import configparser

 from validations_libs import constants
-from validations_libs import utils

 LOG = logging.getLogger(__name__ + ".ansible")
@@ -40,8 +38,9 @@ except NameError:

 class Ansible(object):

-    def __init__(self):
+    def __init__(self, uuid=None):
         self.log = logging.getLogger(__name__ + ".Ansible")
+        self.uuid = uuid

     def _playbook_check(self, play, playbook_dir=None):
         """Check if playbook exist"""
@@ -216,6 +215,9 @@ class Ansible(object):
         env['ANSIBLE_TRANSPORT'] = connection
         env['ANSIBLE_CACHE_PLUGIN_TIMEOUT'] = 7200

+        if self.uuid:
+            env['ANSIBLE_UUID'] = self.uuid
+
         if connection == 'local':
             env['ANSIBLE_PYTHON_INTERPRETER'] = sys.executable
@@ -268,7 +270,7 @@ class Ansible(object):
             gathering_policy='smart',
             extra_env_variables=None, parallel_run=False,
             callback_whitelist=None, ansible_cfg=None,
-            ansible_timeout=30):
+            ansible_timeout=30, ansible_artifact_path=None):

         if not playbook_dir:
             playbook_dir = workdir
@@ -284,9 +286,8 @@ class Ansible(object):
                 )
             )

-        ansible_fact_path = self._creates_ansible_fact_dir()
-
+        # ansible_fact_path = self._creates_ansible_fact_dir()
         extravars = self._get_extra_vars(extra_vars)

         callback_whitelist = self._callback_whitelist(callback_whitelist,
                                                       output_callback)
@@ -295,36 +296,34 @@ class Ansible(object):
             connection, gathering_policy, module_path,
             key, extra_env_variables, ansible_timeout,
             callback_whitelist)

-        command_path = None
-
-        with utils.TempDirs(dir_path=constants.VALIDATION_RUN_LOG_PATH,
-                            chdir=False,) as ansible_artifact_path:
-            if 'ANSIBLE_CONFIG' not in env and not ansible_cfg:
-                ansible_cfg = os.path.join(ansible_artifact_path, 'ansible.cfg')
-                config = configparser.ConfigParser()
-                config.add_section('defaults')
-                config.set('defaults', 'internal_poll_interval', '0.05')
-                with open(ansible_cfg, 'w') as f:
-                    config.write(f)
-                env['ANSIBLE_CONFIG'] = ansible_cfg
-            elif 'ANSIBLE_CONFIG' not in env and ansible_cfg:
-                env['ANSIBLE_CONFIG'] = ansible_cfg
-
-            r_opts = {
-                'private_data_dir': workdir,
-                'project_dir': playbook_dir,
-                'inventory': self._inventory(inventory, ansible_artifact_path),
-                'envvars': self._encode_envvars(env=env),
-                'playbook': playbook,
-                'verbosity': verbosity,
-                'quiet': quiet,
-                'extravars': extravars,
-                'fact_cache': ansible_fact_path,
-                'fact_cache_type': 'jsonfile',
-                'artifact_dir': ansible_artifact_path,
-                'rotate_artifacts': 256
-            }
+        if not ansible_artifact_path:
+            ansible_artifact_path = constants.VALIDATION_ANSIBLE_ARTIFACT_PATH
+        if 'ANSIBLE_CONFIG' not in env and not ansible_cfg:
+            ansible_cfg = os.path.join(ansible_artifact_path, 'ansible.cfg')
+            config = configparser.ConfigParser()
+            config.add_section('defaults')
+            config.set('defaults', 'internal_poll_interval', '0.05')
+            with open(ansible_cfg, 'w') as f:
+                config.write(f)
+            env['ANSIBLE_CONFIG'] = ansible_cfg
+        elif 'ANSIBLE_CONFIG' not in env and ansible_cfg:
+            env['ANSIBLE_CONFIG'] = ansible_cfg
+
+        r_opts = {
+            'private_data_dir': workdir,
+            'project_dir': playbook_dir,
+            'inventory': self._inventory(inventory, ansible_artifact_path),
+            'envvars': self._encode_envvars(env=env),
+            'playbook': playbook,
+            'verbosity': verbosity,
+            'quiet': quiet,
+            'extravars': extravars,
+            'fact_cache': ansible_artifact_path,
+            'fact_cache_type': 'jsonfile',
+            'artifact_dir': workdir,
+            'rotate_artifacts': 256,
+            'ident': ''
+        }

         if skip_tags:
             r_opts['skip_tags'] = skip_tags
@@ -351,4 +350,4 @@ class Ansible(object):
         runner = ansible_runner.Runner(config=runner_config)

         status, rc = runner.run()
-        return runner.stdout.name, playbook, rc, status
+        return playbook, rc, status
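Callers of Ansible.run() now unpack three values instead of four, since the
runner stdout path is no longer returned. Roughly (a sketch; the playbook name
and workdir are illustrative):

    playbook, rc, status = run_ansible.run(playbook='check-ram.yaml',
                                           inventory='localhost,',
                                           workdir='/tmp')
    if rc != 0 or status != 'successful':
        LOG.error('Validation %s finished with status %s', playbook, status)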


@@ -23,3 +23,5 @@ VALIDATION_GROUPS = ['no-op',
                     'post']

 VALIDATION_RUN_LOG_PATH = '/var/lib/validations/logs'
+VALIDATIONS_LOG_BASEDIR = '/var/logs/validations/'
+VALIDATION_ANSIBLE_ARTIFACT_PATH = '/var/lib/validations/artifacts/'


@@ -15,7 +15,6 @@

 import logging
 import os
-import six

 from validations_libs.ansible import Ansible as v_ansible
 from validations_libs import constants
@@ -32,7 +31,7 @@ class Run(object):

     def run_validations(self, playbook=[], inventory='localhost',
                         group=None, extra_vars=None, validations_dir=None,
                         validation_name=None, extra_env_vars=None,
-                        ansible_cfg=None, quiet=True):
+                        ansible_cfg=None, quiet=True, workdir=None):

         self.log = logging.getLogger(__name__ + ".run_validations")
@@ -63,33 +62,35 @@ class Run(object):
                 raise("Please, use '--group' argument instead of "
                       "'--validation' to run validation(s) by their "
                       "name(s)."
                       )
         else:
             raise RuntimeError("No validations found")

-        run_ansible = v_ansible()
         self.log.debug('Running the validations with Ansible')
         results = []
-        with v_utils.TempDirs(chdir=False) as tmp:
-            for playbook in playbooks:
-                stdout_file, _playbook, _rc, _status = run_ansible.run(
-                    workdir=tmp,
-                    playbook=playbook,
-                    playbook_dir=(validations_dir if
-                                  validations_dir else
-                                  constants.ANSIBLE_VALIDATION_DIR),
-                    parallel_run=True,
-                    inventory=inventory,
-                    output_callback='validation_json',
-                    quiet=quiet,
-                    extra_vars=extra_vars,
-                    extra_env_variables=extra_env_vars,
-                    ansible_cfg=ansible_cfg,
-                    gathering_policy='explicit')
-                results.append({'validation': {
-                    'playbook': _playbook,
-                    'rc_code': _rc,
-                    'status': _status,
-                    'stdout_file': stdout_file
-                }})
+        for playbook in playbooks:
+            validation_uuid, artifacts_dir = v_utils.create_artifacts_dir(
+                prefix=os.path.basename(playbook))
+            run_ansible = v_ansible(validation_uuid)
+            _playbook, _rc, _status = run_ansible.run(
+                workdir=artifacts_dir,
+                playbook=playbook,
+                playbook_dir=(validations_dir if
+                              validations_dir else
+                              constants.ANSIBLE_VALIDATION_DIR),
+                parallel_run=True,
+                inventory=inventory,
+                output_callback='validation_json',
+                quiet=quiet,
+                extra_vars=extra_vars,
+                extra_env_variables=extra_env_vars,
+                ansible_cfg=ansible_cfg,
+                gathering_policy='explicit',
+                ansible_artifact_path=artifacts_dir)
+            results.append({'validation': {
+                'playbook': _playbook,
+                'rc_code': _rc,
+                'status': _status,
+                'validation_id': validation_uuid
+            }})
         return results
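Each entry of the returned results now carries the run UUID instead of a
stdout file path, which is what ties a result back to its artifact directory
and callback log. With the UUID mocked as '1234' in the tests below, one entry
looks like:

    {'validation': {'playbook': 'foo.yaml',
                    'rc_code': 0,
                    'status': 'successful',
                    'validation_id': '1234'}}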


@@ -13,8 +13,6 @@
 # under the License.
 #

-from unittest import mock
-
 VALIDATIONS_LIST = [{
     'description': 'My Validation One Description',
     'groups': ['prep', 'pre-deployment'],


@@ -19,7 +19,6 @@ from unittest import TestCase
 from ansible_runner import Runner
 from validations_libs.ansible import Ansible
 from validations_libs.tests import fakes
-from validations_libs import utils


 class TestAnsible(TestCase):
@@ -44,7 +43,7 @@ class TestAnsible(TestCase):
         )
         mock_exists.assert_called_with('/tmp/non-existing.yaml')

-    @mock.patch('tempfile.mkdtemp', return_value='/tmp/')
+    @mock.patch('six.moves.builtins.open')
     @mock.patch('os.path.exists', return_value=True)
     @mock.patch('os.makedirs')
     @mock.patch.object(
@@ -55,32 +54,29 @@ class TestAnsible(TestCase):
     )
     @mock.patch('ansible_runner.utils.dump_artifact', autospec=True,
                 return_value="/foo/inventory.yaml")
-    @mock.patch('ansible_runner.runner.Runner.stdout', autospec=True,
-                return_value="/tmp/foo.yaml")
-    def test_ansible_runner_error(self, mock_stdout, mock_dump_artifact,
+    @mock.patch('ansible_runner.runner_config.RunnerConfig')
+    def test_ansible_runner_error(self, mock_config, mock_dump_artifact,
                                   mock_run, mock_mkdirs, mock_exists,
-                                  mock_mkdtemp):
-        stdout_file, _playbook, _rc, _status = self.run.run('existing.yaml',
-                                                            'localhost,',
-                                                            '/tmp')
+                                  mock_open):
+        _playbook, _rc, _status = self.run.run('existing.yaml',
+                                               'localhost,',
+                                               '/tmp')
         self.assertEquals((_playbook, _rc, _status),
                           ('existing.yaml', 1, 'failed'))

-    @mock.patch('tempfile.mkdtemp', return_value='/tmp/')
+    @mock.patch('six.moves.builtins.open')
     @mock.patch('os.path.exists', return_value=True)
     @mock.patch('os.makedirs')
     @mock.patch.object(Runner, 'run',
-                       return_value=fakes.fake_ansible_runner_run_return(rc=0)
-                       )
+                       return_value=fakes.fake_ansible_runner_run_return(rc=0))
     @mock.patch('ansible_runner.utils.dump_artifact', autospec=True,
                 return_value="/foo/inventory.yaml")
-    @mock.patch('ansible_runner.runner.Runner.stdout', autospec=True,
-                return_value="/tmp/foo.yaml")
-    def test_run_success_default(self, mock_stdout, mock_dump_artifact,
+    @mock.patch('ansible_runner.runner_config.RunnerConfig')
+    def test_run_success_default(self, mock_config, mock_dump_artifact,
                                  mock_run, mock_mkdirs, mock_exists,
-                                 mock_mkstemp):
-        stdout_file, _playbook, _rc, _status = self.run.run(
+                                 mock_open):
+        _playbook, _rc, _status = self.run.run(
             playbook='existing.yaml',
             inventory='localhost,',
             workdir='/tmp'
@@ -88,21 +84,19 @@ class TestAnsible(TestCase):
         self.assertEquals((_playbook, _rc, _status),
                           ('existing.yaml', 0, 'successful'))

-    @mock.patch('tempfile.mkdtemp', return_value='/tmp/')
+    @mock.patch('six.moves.builtins.open')
     @mock.patch('os.path.exists', return_value=True)
     @mock.patch('os.makedirs')
     @mock.patch.object(Runner, 'run',
-                       return_value=fakes.fake_ansible_runner_run_return(rc=0)
-                       )
+                       return_value=fakes.fake_ansible_runner_run_return(rc=0))
     @mock.patch('ansible_runner.utils.dump_artifact', autospec=True,
                 return_value="/foo/inventory.yaml")
-    @mock.patch('ansible_runner.runner.Runner.stdout', autospec=True,
-                return_value="/tmp/foo.yaml")
-    def test_run_success_gathering_policy(self, mock_stdout,
+    @mock.patch('ansible_runner.runner_config.RunnerConfig')
+    def test_run_success_gathering_policy(self, mock_config,
                                           mock_dump_artifact, mock_run,
                                           mock_mkdirs, mock_exists,
-                                          mock_mkstemp):
-        stdout_file, _playbook, _rc, _status = self.run.run(
+                                          mock_open):
+        _playbook, _rc, _status = self.run.run(
             playbook='existing.yaml',
             inventory='localhost,',
             workdir='/tmp',
@@ -112,21 +106,19 @@ class TestAnsible(TestCase):
         self.assertEquals((_playbook, _rc, _status),
                           ('existing.yaml', 0, 'successful'))

-    @mock.patch('tempfile.mkdtemp', return_value='/tmp/')
     @mock.patch('os.path.exists', return_value=True)
     @mock.patch('os.makedirs')
     @mock.patch.object(Runner, 'run',
-                       return_value=fakes.fake_ansible_runner_run_return(rc=0)
-                       )
+                       return_value=fakes.fake_ansible_runner_run_return(rc=0))
     @mock.patch('ansible_runner.utils.dump_artifact', autospec=True,
                 return_value="/foo/inventory.yaml")
-    @mock.patch('ansible_runner.runner.Runner.stdout', autospec=True,
-                return_value="/tmp/foo.yaml")
-    def test_run_success_local(self, mock_stdout,
+    @mock.patch('six.moves.builtins.open')
+    @mock.patch('ansible_runner.runner_config.RunnerConfig')
+    def test_run_success_local(self, mock_config, mock_open,
                                mock_dump_artifact, mock_run,
-                               mock_mkdirs, mock_exists,
-                               mock_mkstemp):
-        stdout_file, _playbook, _rc, _status = self.run.run(
+                               mock_mkdirs, mock_exists
+                               ):
+        _playbook, _rc, _status = self.run.run(
             playbook='existing.yaml',
             inventory='localhost,',
             workdir='/tmp',


@@ -27,7 +27,9 @@ class TestValidatorRun(TestCase):
     @mock.patch('validations_libs.utils.parse_all_validations_on_disk')
     @mock.patch('validations_libs.ansible.Ansible.run')
-    def test_validation_run_success(self, mock_ansible_run,
+    @mock.patch('validations_libs.utils.create_artifacts_dir',
+                return_value=('1234', '/tmp/'))
+    def test_validation_run_success(self, mock_tmp, mock_ansible_run,
                                     mock_validation_dir):
         mock_validation_dir.return_value = [{
             'description': 'My Validation One Description',
@@ -35,14 +37,17 @@ class TestValidatorRun(TestCase):
             'id': 'foo',
             'name': 'My Validition One Name',
             'parameters': {}}]
-        mock_ansible_run.return_value = ('/tmp/validation/stdout.log',
-                                         'foo.yaml', 0, 'successful')
+        mock_ansible_run.return_value = ('foo.yaml', 0, 'successful')

         expected_run_return = [
             {'validation': {'playbook': 'foo.yaml',
                             'rc_code': 0,
                             'status': 'successful',
-                            'stdout_file': '/tmp/validation/stdout.log'}}]
+                            'validation_id': '1234'}},
+            {'validation': {'playbook': 'foo.yaml',
+                            'rc_code': 0,
+                            'status': 'successful',
+                            'validation_id': '1234'}}]

         playbook = ['fake.yaml']
         inventory = 'tmp/inventory.yaml'
@@ -55,7 +60,9 @@ class TestValidatorRun(TestCase):
     @mock.patch('validations_libs.utils.parse_all_validations_on_disk')
     @mock.patch('validations_libs.ansible.Ansible.run')
-    def test_validation_run_failed(self, mock_ansible_run,
+    @mock.patch('validations_libs.utils.create_artifacts_dir',
+                return_value=('1234', '/tmp/'))
+    def test_validation_run_failed(self, mock_tmp, mock_ansible_run,
                                    mock_validation_dir):
         mock_validation_dir.return_value = [{
             'description': 'My Validation One Description',
@@ -63,14 +70,17 @@ class TestValidatorRun(TestCase):
             'id': 'foo',
             'name': 'My Validition One Name',
             'parameters': {}}]
-        mock_ansible_run.return_value = ('/tmp/validation/stdout.log',
-                                         'foo.yaml', 0, 'failed')
+        mock_ansible_run.return_value = ('foo.yaml', 0, 'failed')

         expected_run_return = [
             {'validation': {'playbook': 'foo.yaml',
                             'rc_code': 0,
                             'status': 'failed',
-                            'stdout_file': '/tmp/validation/stdout.log'}}]
+                            'validation_id': '1234'}},
+            {'validation': {'playbook': 'foo.yaml',
+                            'rc_code': 0,
+                            'status': 'failed',
+                            'validation_id': '1234'}}]

         playbook = ['fake.yaml']
         inventory = 'tmp/inventory.yaml'


@@ -12,6 +12,7 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 #
+import datetime
 import glob
 import json
 import logging
@@ -22,6 +23,7 @@ import tempfile
 import yaml

 from validations_libs import constants
+from uuid import uuid4

 RED = "\033[1;31m"
 GREEN = "\033[0;32m"
@@ -125,12 +127,35 @@ class TempDirs(object):
         LOG.info("Temporary directory [ %s ] cleaned up" % self.dir)


+def current_time():
+    return '%sZ' % datetime.datetime.utcnow().isoformat()
+
+
+def create_artifacts_dir(dir_path=None, prefix=None):
+    dir_path = (dir_path if dir_path else
+                constants.VALIDATION_ANSIBLE_ARTIFACT_PATH)
+    validation_uuid = str(uuid4())
+    log_dir = "{}/{}_{}_{}".format(dir_path, validation_uuid,
+                                   (prefix if prefix else ''), current_time())
+    try:
+        os.makedirs(log_dir)
+        return validation_uuid, log_dir
+    except OSError:
+        LOG.exception("Error while creating Ansible artifacts log file."
+                      "Please check the access rights for {}").format(log_dir)
+
+
 def parse_all_validations_on_disk(path, groups=None):
     results = []
     validations_abspath = glob.glob("{path}/*.yaml".format(path=path))
+    if isinstance(groups, six.string_types):
+        group_list = []
+        group_list.append(groups)
+        groups = group_list
+
     for pl in validations_abspath:
-        validation_id, ext = os.path.splitext(os.path.basename(pl))
+        validation_id, _ext = os.path.splitext(os.path.basename(pl))

         with open(pl, 'r') as val_playbook:
             contents = yaml.safe_load(val_playbook)
@@ -167,6 +192,27 @@ def parse_all_validation_groups_on_disk(groups_file_path=None):
     return results


+def parse_all_validations_logs_on_disk(uuid_run=None, validation_id=None):
+    results = []
+    path = constants.VALIDATIONS_LOG_BASEDIR
+    logfile = "{}/*.json".format(path)
+
+    if validation_id:
+        logfile = "{}/*_{}_*.json".format(path, validation_id)
+
+    if uuid_run:
+        logfile = "{}/*_{}_*.json".format(path, uuid_run)
+
+    logfiles_path = glob.glob(logfile)
+
+    for logfile_path in logfiles_path:
+        with open(logfile_path, 'r') as log:
+            contents = json.load(log)
+        results.append(contents)
+    return results
+
+
 def get_validation_metadata(validation, key):
     default_metadata = {
         'name': 'Unnamed',
@@ -224,11 +270,11 @@ def get_validation_group_name_list():
     return results


-def get_new_validations_logs_on_disk():
+def get_new_validations_logs_on_disk(validations_logs_dir):
     """Return a list of new log execution filenames """
     files = []

-    for root, dirs, filenames in os.walk(constants.VALIDATIONS_LOG_BASEDIR):
+    for root, dirs, filenames in os.walk(validations_logs_dir):
         files = [
             f for f in filenames if not f.startswith('processed')
             and os.path.splitext(f)[1] == '.json'
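create_artifacts_dir() encodes the UUID, the playbook prefix and a UTC
timestamp into the directory name, and parse_all_validations_logs_on_disk()
globs for the same "<uuid>_<name>_<time>" pattern in the JSON logs written
under VALIDATIONS_LOG_BASEDIR by the callback. A small lookup sketch (the log
filename in the comment is an assumed example of that naming convention):

    from validations_libs import utils

    # e.g. /var/logs/validations/1234_check-ram.yaml_2020-03-11T16:07:56Z.json
    by_uuid = utils.parse_all_validations_logs_on_disk(uuid_run='1234')
    by_name = utils.parse_all_validations_logs_on_disk(
        validation_id='check-ram.yaml')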

zuul.d/layout.yaml (new file)

@@ -0,0 +1,12 @@
+- project:
+    templates:
+      - openstack-python3-ussuri-jobs
+      - check-requirements
+    check:
+      jobs:
+        - openstack-tox-linters
+        - openstack-tox-lower-constraints
+    gate:
+      jobs:
+        - openstack-tox-linters
+        - openstack-tox-lower-constraints