linters refresh with related bugfixes

- removes ansible-lint as we have no ansible files here
- bumps linter versions
- fixes newly found errors
- returns to more interesting work

Change-Id: Iaaf0a850c8429fe041a595028cbc31b9ff0fb9e5
Partial-Bug: #1878150
Sorin Sbarnea 2020-05-12 09:42:53 +01:00 committed by Sorin Sbarnea (zbr)
parent 1fbea6a972
commit cb497e83f9
8 changed files with 35 additions and 53 deletions


@@ -1,11 +0,0 @@
---
parseable: true
skip_list:
- ANSIBLE0006
- ANSIBLE0012
- ANSIBLE0016
- '204' # [E204] Lines should be no longer than 120 chars
# E602 https://github.com/ansible/ansible-lint/issues/450
- '602' # [E602] Don't compare to empty string
# E405 is too new and we need to enable it in a separate change
- '405' # [E405] Remote package tasks should have a retry


@@ -1,7 +1,7 @@
---
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v2.2.3
rev: v2.5.0
hooks:
- id: trailing-whitespace
- id: mixed-line-ending
@@ -12,21 +12,14 @@ repos:
- id: check-yaml
files: .*\.(yaml|yml)$
- repo: https://gitlab.com/pycqa/flake8.git
rev: '3.7.7'
rev: 3.8.1
hooks:
- id: flake8
- repo: https://github.com/ansible/ansible-lint
rev: v4.1.0a0
hooks:
- id: ansible-lint
files: \.(yaml|yml)$
exclude: ^workbooks/
entry: ansible-lint --force-color -v
- repo: https://github.com/openstack-dev/bashate.git
rev: 0.6.0
rev: 2.0.0
hooks:
- id: bashate
entry: bashate --error . --verbose --ignore=E006,E040,E042
entry: bashate --error . --ignore=E006,E040,E042
# Run bashate check for all bash scripts
# Ignores the following rules:
# E006: Line longer than 79 columns (as many scripts use jinja


@@ -79,9 +79,8 @@ commands =
[testenv:linters]
skip_install = true
deps =
# workaround for https://github.com/ansible/ansible-lint/issues/590
virtualenv==16.3.0 # 16.7.6 not working
pre-commit
virtualenv>=20.0.20
pre-commit>=2.4.0
commands =
python -m pre_commit run -a {posargs:}
bash -c tools/check_duplicate_jinja_blocks.sh


@@ -327,8 +327,8 @@ def parse_type_map_file(type_map_path):
content_type = None
type_map = {}
with open(type_map_path, 'r') as f:
for l in f:
line = l[:-1]
for x in f:
line = x[:-1]
if not line:
if uri and content_type:
type_map[content_type] = uri
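The l -> x renames here and in the files below are most likely the "newly found errors" mentioned in the commit message: flake8 (via pycodestyle's E741 check) treats single-letter names such as l as ambiguous because they are easily confused with the digit 1. A minimal sketch of the pattern, using illustrative names rather than code from this repository:

# Illustrative only: flake8/pycodestyle reports E741 ("ambiguous variable
# name 'l'") for names that read like digits, hence the rename to 'x'.
def first_tokens(lines):
    # Before the cleanup this read "for l in lines", which trips E741.
    return [x.split()[0] for x in lines if x.strip()]

print(first_tokens(["foo bar", "", "baz qux"]))  # ['foo', 'baz']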


@@ -762,10 +762,10 @@ class BaseImageUploader(object):
if manifest.get('schemaVersion', 2) == 1:
config = json.loads(manifest['history'][0]['v1Compatibility'])
layers = list(reversed([l['blobSum']
for l in manifest['fsLayers']]))
layers = list(reversed([x['blobSum']
for x in manifest['fsLayers']]))
else:
layers = [l['digest'] for l in manifest['layers']]
layers = [x['digest'] for x in manifest['layers']]
parts['digest'] = manifest['config']['digest']
config_headers = {
@@ -1676,10 +1676,10 @@ class PythonImageUploader(BaseImageUploader):
manifests_str.append(manifest_str)
manifest = json.loads(manifest_str)
if manifest.get('schemaVersion', 2) == 1:
layers.extend(reversed([l['blobSum']
for l in manifest['fsLayers']]))
layers.extend(reversed([x['blobSum']
for x in manifest['fsLayers']]))
elif manifest.get('mediaType') == MEDIA_MANIFEST_V2:
layers.extend(l['digest'] for l in manifest['layers'])
layers.extend(x['digest'] for x in manifest['layers'])
elif manifest.get('mediaType') == MEDIA_MANIFEST_V2_LIST:
image, _, tag = image_url.geturl().rpartition(':')
for man in manifest.get('manifests', []):
@@ -2052,24 +2052,24 @@ class PythonImageUploader(BaseImageUploader):
layer_found = None
# Check in global view or do a HEAD call for the supplied
# digests to see if the layer is already in the registry
for l in check_layers:
if not l:
for x in check_layers:
if not x:
continue
known_path, ref_image = image_utils.uploaded_layers_details(
cls._global_view_proxy(), l['digest'], scope='remote')
cls._global_view_proxy(), x['digest'], scope='remote')
if ref_image == norm_image:
LOG.debug('[%s] Layer %s already exists at %s' %
(image, l['digest'], known_path))
layer_found = l
(image, x['digest'], known_path))
layer_found = x
break
else:
parts['digest'] = l['digest']
parts['digest'] = x['digest']
blob_url = cls._build_url(
target_url, CALL_BLOB % parts)
if session.head(blob_url, timeout=30).status_code == 200:
LOG.debug('[%s] Layer already exists: %s' %
(image, l['digest']))
layer_found = l
(image, x['digest']))
layer_found = x
break
if layer_found:
layer['digest'] = layer_found['digest']
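The hunk above avoids re-pushing layers the target registry already holds, either through the global view cache or a per-digest HEAD request. As a standalone sketch of that HEAD-based check (the function and its arguments are hypothetical, not this module's API):

# Hypothetical sketch: a Docker Registry v2 endpoint answers a HEAD request
# on blobs/<digest> with 200 when the blob is already stored, so the matching
# layer upload can be skipped.
import requests

def blob_exists(registry_url, name, digest, session=None):
    session = session or requests.Session()
    url = "%s/v2/%s/blobs/%s" % (registry_url, name, digest)
    return session.head(url, timeout=30).status_code == 200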
@@ -2305,11 +2305,11 @@ class PythonImageUploader(BaseImageUploader):
def _get_all_local_layers_by_digest(cls):
all_layers = cls._containers_json('overlay-layers', 'layers.json')
layers_by_digest = {}
for l in all_layers:
if 'diff-digest' in l:
layers_by_digest[l['diff-digest']] = l
if 'compressed-diff-digest' in l:
layers_by_digest[l['compressed-diff-digest']] = l
for x in all_layers:
if 'diff-digest' in x:
layers_by_digest[x['diff-digest']] = x
if 'compressed-diff-digest' in x:
layers_by_digest[x['compressed-diff-digest']] = x
return layers_by_digest
@classmethod
@@ -2384,7 +2384,7 @@ class PythonImageUploader(BaseImageUploader):
image, manifest, config_str = cls._image_manifest_config(name)
config = json.loads(config_str)
layers = [l['digest'] for l in manifest['layers']]
layers = [x['digest'] for x in manifest['layers']]
i, _ = cls._image_tag_from_url(image_url)
digest = image['digest']
created = image['created']
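Several of the renamed loops in this file walk image manifests the same way; a condensed sketch of that shared logic, simplified and not the uploader's actual helper:

# Simplified sketch: schemaVersion 1 manifests list layers newest-first under
# 'fsLayers'/'blobSum', while schemaVersion 2 manifests list them oldest-first
# under 'layers'/'digest'.
def layer_digests(manifest):
    if manifest.get('schemaVersion', 2) == 1:
        return [x['blobSum'] for x in reversed(manifest['fsLayers'])]
    return [x['digest'] for x in manifest['layers']]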


@@ -349,12 +349,12 @@ URI: sha256:1234abcd/index.json
if manifest.get('schemaVersion', 2) == 1:
config_str = None
manifest_type = image_uploader.MEDIA_MANIFEST_V1
layers = list(reversed([l['blobSum']
for l in manifest['fsLayers']]))
layers = list(reversed([x['blobSum']
for x in manifest['fsLayers']]))
else:
config_str = '{"config": {}}'
manifest_type = image_uploader.MEDIA_MANIFEST_V2
layers = [l['digest'] for l in manifest['layers']]
layers = [x['digest'] for x in manifest['layers']]
manifest_str = json.dumps(manifest)
calc_digest = hashlib.sha256()
calc_digest.update(manifest_str.encode('utf-8'))


@@ -1944,6 +1944,7 @@ class TestPythonImageUploader(base.TestCase):
'.manifest.v2+json'
}
)
@mock.patch('tripleo_common.image.image_uploader.'
'RegistrySessionHelper.check_status')
def test_upload_url(self, check_status):


@@ -74,14 +74,14 @@ def generate_role_with_colon_format(content, defined_role, generated_role):
# "Compute:Compute" is invalid format
if generated_role == defined_role:
msg = ("Generated role name cannot be same as existing role name (%s) "
msg = ("Generated role name cannot be same as existing role name ({}) "
"with colon format".format(defined_role))
raise ValueError(msg)
# "Compute:A" is invalid format
if not generated_role.startswith(defined_role):
msg = ("Generated role name (%s) name should start with existing role "
"name (%s)".format(generated_role, defined_role))
msg = ("Generated role name ({}) name should start with existing role "
"name ({})".format(generated_role, defined_role))
raise ValueError(msg)
name_line = "name:%s" % defined_role
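The last hunk fixes an actual bug rather than a lint finding: %s is a printf-style placeholder, so str.format() leaves it in the message verbatim. A quick standalone demonstration:

# str.format() substitutes only {}-style fields; a literal "%s" survives,
# which is what the pre-fix error messages printed.
defined_role = "Compute"
broken = ("Generated role name cannot be same as existing role name (%s) "
          "with colon format".format(defined_role))
fixed = ("Generated role name cannot be same as existing role name ({}) "
         "with colon format".format(defined_role))
print(broken)  # ... existing role name (%s) with colon format
print(fixed)   # ... existing role name (Compute) with colon format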