Fix flake8 3.6.0 warnings

flake8 3.6.0 introduces a couple of new checks; handle them in the Zuul
base:

* Disable "W504 line break after binary operator", this is a new warning
  with different coding style.
* Fix "F841 local variable 'e' is assigned to but never used"
* Fix "W605 invalid escape sequence" - use raw strings for regexes.
* Fix "F901 'raise NotImplemented' should be 'raise
  NotImplementedError'"
* Ignore "E252 missing whitespace around parameter equals" since it
  reports on parameters like:
  def makeNewJobs(self, old_job, parent: Job=None):

Change "flake8: noqa" to "noqa": "flake8: noqa" is a file-level noqa
directive, and with flake8 3.6.0 it is ignored unless it appears at the
beginning of a line. This resulted in many warnings for the files
./zuul/driver/bubblewrap/__init__.py and ./zuul/cmd/migrate.py; fix the
issues reported there.

Change-Id: Ia79bbc8ac0cd8e4819f61bda0091f4398464c5dc
This commit is contained in:
Andreas Jaeger 2018-10-28 15:29:06 +01:00
parent f856841c49
commit d9059524e0
14 changed files with 70 additions and 68 deletions

View File

@ -1,4 +1,4 @@
flake8<3.6.0 flake8
coverage>=3.6 coverage>=3.6
sphinx>=1.6.1 sphinx>=1.6.1

View File

@ -2715,7 +2715,7 @@ class ZuulTestCase(BaseTestCase):
content = f.read() content = f.read()
# dynamically create symlinks if the content is of the form # dynamically create symlinks if the content is of the form
# symlink: <target> # symlink: <target>
match = re.match(b'symlink: ([^\s]+)', content) match = re.match(rb'symlink: ([^\s]+)', content)
if match: if match:
content = SymLink(match.group(1)) content = SymLink(match.group(1))

View File

@ -76,8 +76,8 @@ class TestZuulStream(AnsibleZuulTestCase):
return f.read() return f.read()
def assertLogLine(self, line, log): def assertLogLine(self, line, log):
pattern = ('^\d\d\d\d-\d\d-\d\d \d\d:\d\d\:\d\d\.\d\d\d\d\d\d \| %s$' % pattern = (r'^\d\d\d\d-\d\d-\d\d \d\d:\d\d\:\d\d\.\d\d\d\d\d\d \| %s$'
line) % line)
log_re = re.compile(pattern, re.MULTILINE) log_re = re.compile(pattern, re.MULTILINE)
m = log_re.search(log) m = log_re.search(log)
if m is None: if m is None:
@ -91,45 +91,45 @@ class TestZuulStream(AnsibleZuulTestCase):
text = self._get_job_output(build) text = self._get_job_output(build)
self.assertLogLine( self.assertLogLine(
'RUN START: \[untrusted : review.example.com/org/project/' r'RUN START: \[untrusted : review.example.com/org/project/'
'playbooks/command.yaml@master\]', text) r'playbooks/command.yaml@master\]', text)
self.assertLogLine('PLAY \[all\]', text) self.assertLogLine(r'PLAY \[all\]', text)
self.assertLogLine('TASK \[Show contents of first file\]', text) self.assertLogLine(r'TASK \[Show contents of first file\]', text)
self.assertLogLine('controller \| command test one', text) self.assertLogLine(r'controller \| command test one', text)
self.assertLogLine( self.assertLogLine(
'controller \| ok: Runtime: \d:\d\d:\d\d\.\d\d\d\d\d\d', text) r'controller \| ok: Runtime: \d:\d\d:\d\d\.\d\d\d\d\d\d', text)
self.assertLogLine('TASK \[Show contents of second file\]', text) self.assertLogLine(r'TASK \[Show contents of second file\]', text)
self.assertLogLine('compute1 \| command test two', text) self.assertLogLine(r'compute1 \| command test two', text)
self.assertLogLine('controller \| command test two', text) self.assertLogLine(r'controller \| command test two', text)
self.assertLogLine('compute1 \| This is a rescue task', text) self.assertLogLine(r'compute1 \| This is a rescue task', text)
self.assertLogLine('controller \| This is a rescue task', text) self.assertLogLine(r'controller \| This is a rescue task', text)
self.assertLogLine('compute1 \| This is an always task', text) self.assertLogLine(r'compute1 \| This is an always task', text)
self.assertLogLine('controller \| This is an always task', text) self.assertLogLine(r'controller \| This is an always task', text)
self.assertLogLine('compute1 \| This is a handler', text) self.assertLogLine(r'compute1 \| This is a handler', text)
self.assertLogLine('controller \| This is a handler', text) self.assertLogLine(r'controller \| This is a handler', text)
self.assertLogLine('controller \| First free task', text) self.assertLogLine(r'controller \| First free task', text)
self.assertLogLine('controller \| Second free task', text) self.assertLogLine(r'controller \| Second free task', text)
self.assertLogLine('controller \| This is a shell task after an ' self.assertLogLine(r'controller \| This is a shell task after an '
'included role', text) 'included role', text)
self.assertLogLine('compute1 \| This is a shell task after an ' self.assertLogLine(r'compute1 \| This is a shell task after an '
'included role', text) 'included role', text)
self.assertLogLine('controller \| This is a command task after an ' self.assertLogLine(r'controller \| This is a command task after '
'an included role', text)
self.assertLogLine(r'compute1 \| This is a command task after an '
'included role', text) 'included role', text)
self.assertLogLine('compute1 \| This is a command task after an ' self.assertLogLine(r'controller \| This is a shell task with '
'included role', text)
self.assertLogLine('controller \| This is a shell task with '
'delegate compute1', text) 'delegate compute1', text)
self.assertLogLine('controller \| This is a shell task with ' self.assertLogLine(r'controller \| This is a shell task with '
'delegate controller', text) 'delegate controller', text)
self.assertLogLine( self.assertLogLine(
'controller \| ok: Runtime: \d:\d\d:\d\d\.\d\d\d\d\d\d', text) r'controller \| ok: Runtime: \d:\d\d:\d\d\.\d\d\d\d\d\d', text)
self.assertLogLine('PLAY RECAP', text) self.assertLogLine('PLAY RECAP', text)
self.assertLogLine( self.assertLogLine(
'controller \| ok: \d+ changed: \d+ unreachable: 0 failed: 1', r'controller \| ok: \d+ changed: \d+ unreachable: 0 failed: 1',
text) text)
self.assertLogLine( self.assertLogLine(
'RUN END RESULT_NORMAL: \[untrusted : review.example.com/' r'RUN END RESULT_NORMAL: \[untrusted : review.example.com/'
'org/project/playbooks/command.yaml@master]', text) r'org/project/playbooks/command.yaml@master]', text)
def test_module_failure(self): def test_module_failure(self):
job = self._run_job('module_failure') job = self._run_job('module_failure')
@ -138,6 +138,6 @@ class TestZuulStream(AnsibleZuulTestCase):
self.assertEqual(build.result, 'FAILURE') self.assertEqual(build.result, 'FAILURE')
text = self._get_job_output(build) text = self._get_job_output(build)
self.assertLogLine('TASK \[Module failure\]', text) self.assertLogLine(r'TASK \[Module failure\]', text)
self.assertLogLine( self.assertLogLine(
'controller \| MODULE FAILURE: This module is broken', text) r'controller \| MODULE FAILURE: This module is broken', text)

View File

@ -54,10 +54,10 @@ class TestGithubDriver(ZuulTestCase):
self.assertEqual(1, len(A.comments)) self.assertEqual(1, len(A.comments))
self.assertThat( self.assertThat(
A.comments[0], A.comments[0],
MatchesRegex('.*\[project-test1 \]\(.*\).*', re.DOTALL)) MatchesRegex(r'.*\[project-test1 \]\(.*\).*', re.DOTALL))
self.assertThat( self.assertThat(
A.comments[0], A.comments[0],
MatchesRegex('.*\[project-test2 \]\(.*\).*', re.DOTALL)) MatchesRegex(r'.*\[project-test2 \]\(.*\).*', re.DOTALL))
self.assertEqual(2, len(self.history)) self.assertEqual(2, len(self.history))
# test_pull_unmatched_branch_event(self): # test_pull_unmatched_branch_event(self):
@ -389,7 +389,7 @@ class TestGithubDriver(ZuulTestCase):
self.assertEqual(check_url, check_status['url']) self.assertEqual(check_url, check_status['url'])
self.assertEqual(1, len(A.comments)) self.assertEqual(1, len(A.comments))
self.assertThat(A.comments[0], self.assertThat(A.comments[0],
MatchesRegex('.*Build succeeded.*', re.DOTALL)) MatchesRegex(r'.*Build succeeded.*', re.DOTALL))
# pipeline does not report any status but does comment # pipeline does not report any status but does comment
self.executor_server.hold_jobs_in_build = True self.executor_server.hold_jobs_in_build = True
@ -401,7 +401,8 @@ class TestGithubDriver(ZuulTestCase):
# comments increased by one for the start message # comments increased by one for the start message
self.assertEqual(2, len(A.comments)) self.assertEqual(2, len(A.comments))
self.assertThat(A.comments[1], self.assertThat(A.comments[1],
MatchesRegex('.*Starting reporting jobs.*', re.DOTALL)) MatchesRegex(r'.*Starting reporting jobs.*',
re.DOTALL))
self.executor_server.hold_jobs_in_build = False self.executor_server.hold_jobs_in_build = False
self.executor_server.release() self.executor_server.release()
self.waitUntilSettled() self.waitUntilSettled()
@ -427,7 +428,7 @@ class TestGithubDriver(ZuulTestCase):
# The rest of the URL is a UUID and a trailing slash. # The rest of the URL is a UUID and a trailing slash.
self.assertThat(report_status['url'][len(base):], self.assertThat(report_status['url'][len(base):],
MatchesRegex('^[a-fA-F0-9]{32}\/$')) MatchesRegex(r'^[a-fA-F0-9]{32}\/$'))
@simple_layout('layouts/reporting-github.yaml', driver='github') @simple_layout('layouts/reporting-github.yaml', driver='github')
def test_truncated_status_description(self): def test_truncated_status_description(self):
@ -511,7 +512,7 @@ class TestGithubDriver(ZuulTestCase):
self.waitUntilSettled() self.waitUntilSettled()
self.assertTrue(A.is_merged) self.assertTrue(A.is_merged)
self.assertThat(A.merge_message, self.assertThat(A.merge_message,
MatchesRegex('.*PR title.*Reviewed-by.*', re.DOTALL)) MatchesRegex(r'.*PR title.*Reviewed-by.*', re.DOTALL))
# pipeline merges the pull request on success after failure # pipeline merges the pull request on success after failure
self.fake_github.merge_failure = True self.fake_github.merge_failure = True
@ -583,7 +584,7 @@ class TestGithubDriver(ZuulTestCase):
self.assertEqual(check_url, check_status['url']) self.assertEqual(check_url, check_status['url'])
self.assertEqual(1, len(A.comments)) self.assertEqual(1, len(A.comments))
self.assertThat(A.comments[0], self.assertThat(A.comments[0],
MatchesRegex('.*Build succeeded.*', re.DOTALL)) MatchesRegex(r'.*Build succeeded.*', re.DOTALL))
@simple_layout('layouts/dependent-github.yaml', driver='github') @simple_layout('layouts/dependent-github.yaml', driver='github')
def test_parallel_changes(self): def test_parallel_changes(self):
@ -1126,10 +1127,10 @@ class TestGithubWebhook(ZuulTestCase):
self.assertEqual(1, len(A.comments)) self.assertEqual(1, len(A.comments))
self.assertThat( self.assertThat(
A.comments[0], A.comments[0],
MatchesRegex('.*\[project-test1 \]\(.*\).*', re.DOTALL)) MatchesRegex(r'.*\[project-test1 \]\(.*\).*', re.DOTALL))
self.assertThat( self.assertThat(
A.comments[0], A.comments[0],
MatchesRegex('.*\[project-test2 \]\(.*\).*', re.DOTALL)) MatchesRegex(r'.*\[project-test2 \]\(.*\).*', re.DOTALL))
self.assertEqual(2, len(self.history)) self.assertEqual(2, len(self.history))
# test_pull_unmatched_branch_event(self): # test_pull_unmatched_branch_event(self):

View File

@ -118,7 +118,7 @@ class TestMergerRepo(ZuulTestCase):
# exceptions, including this one on initialization. For the # exceptions, including this one on initialization. For the
# test, we try cloning again. # test, we try cloning again.
with testtools.ExpectedException(git.exc.GitCommandError, with testtools.ExpectedException(git.exc.GitCommandError,
'.*exit code\(-9\)'): r'.*exit code\(-9\)'):
work_repo._ensure_cloned() work_repo._ensure_cloned()
def test_fetch_timeout(self): def test_fetch_timeout(self):
@ -130,7 +130,7 @@ class TestMergerRepo(ZuulTestCase):
self.patch(git.Git, 'GIT_PYTHON_GIT_EXECUTABLE', self.patch(git.Git, 'GIT_PYTHON_GIT_EXECUTABLE',
os.path.join(FIXTURE_DIR, 'fake_git.sh')) os.path.join(FIXTURE_DIR, 'fake_git.sh'))
with testtools.ExpectedException(git.exc.GitCommandError, with testtools.ExpectedException(git.exc.GitCommandError,
'.*exit code\(-9\)'): r'.*exit code\(-9\)'):
work_repo.update() work_repo.update()
def test_fetch_retry(self): def test_fetch_retry(self):

View File

@ -36,7 +36,7 @@ class TestWebURLs(ZuulTestCase):
req = urllib.request.Request(url) req = urllib.request.Request(url)
try: try:
f = urllib.request.urlopen(req) f = urllib.request.urlopen(req)
except urllib.error.HTTPError as e: except urllib.error.HTTPError:
raise Exception("Error on URL {}".format(url)) raise Exception("Error on URL {}".format(url))
return f.read() return f.read()

View File

@ -82,6 +82,6 @@ install_command = {[nodeenv]install_command}
[flake8] [flake8]
# These are ignored intentionally in openstack-infra projects; # These are ignored intentionally in openstack-infra projects;
# please don't submit patches that solely correct them or enable them. # please don't submit patches that solely correct them or enable them.
ignore = E124,E125,E129,E402,E741,H,W503 ignore = E124,E125,E129,E252,E402,E741,H,W503,W504
show-source = True show-source = True
exclude = .venv,.tox,dist,doc,build,*.egg,node_modules exclude = .venv,.tox,dist,doc,build,*.egg,node_modules

View File

@ -223,7 +223,7 @@ def get_pid_from_inode(inode):
try: try:
try: try:
int(d) int(d)
except Exception as e: except Exception:
continue continue
d_abs_path = os.path.join('/proc', d) d_abs_path = os.path.join('/proc', d)
if os.stat(d_abs_path).st_uid != my_euid: if os.stat(d_abs_path).st_uid != my_euid:

View File

@ -29,11 +29,10 @@ import itertools
import getopt import getopt
import logging import logging
import os import os
import operator
import subprocess import subprocess
import tempfile import tempfile
import re import re
from typing import Any, Dict, List, Optional # flake8: noqa from typing import Any, Dict, List, Optional
import jenkins_jobs.builder import jenkins_jobs.builder
from jenkins_jobs.formatter import deep_format from jenkins_jobs.formatter import deep_format
@ -47,7 +46,7 @@ TEMPLATES_TO_EXPAND = {} # type: Dict[str, List]
JOBS_FOR_EXPAND = collections.defaultdict(dict) # type: ignore JOBS_FOR_EXPAND = collections.defaultdict(dict) # type: ignore
JOBS_BY_ORIG_TEMPLATE = {} # type: ignore JOBS_BY_ORIG_TEMPLATE = {} # type: ignore
SUFFIXES = [] # type: ignore SUFFIXES = [] # type: ignore
SKIP_MACROS = [] # type: ignore SKIP_MACROS = [] # type: ignore
ENVIRONMENT = '{{ zuul | zuul_legacy_vars }}' ENVIRONMENT = '{{ zuul | zuul_legacy_vars }}'
DESCRIPTION = """Migrate zuul v2 and Jenkins Job Builder to Zuul v3. DESCRIPTION = """Migrate zuul v2 and Jenkins Job Builder to Zuul v3.
@ -57,6 +56,7 @@ optional mapping config can be given that defines how to map old jobs
to new jobs. to new jobs.
""" """
def deal_with_shebang(data): def deal_with_shebang(data):
# Ansible shell blocks do not honor shebang lines. That's fine - but # Ansible shell blocks do not honor shebang lines. That's fine - but
# we do have a bunch of scripts that have either nothing, -x, -xe, # we do have a bunch of scripts that have either nothing, -x, -xe,
@ -222,7 +222,7 @@ def normalize_project_expansions():
# from : # from :
# http://stackoverflow.com/questions/8640959/how-can-i-control-what-scalar-form-pyyaml-uses-for-my-data flake8: noqa # http://stackoverflow.com/questions/8640959/how-can-i-control-what-scalar-form-pyyaml-uses-for-my-data # noqa
def should_use_block(value): def should_use_block(value):
for c in u"\u000a\u000d\u001c\u001d\u001e\u0085\u2028\u2029": for c in u"\u000a\u000d\u001c\u001d\u001e\u0085\u2028\u2029":
if c in value: if c in value:
@ -233,7 +233,7 @@ def should_use_block(value):
def my_represent_scalar(self, tag, value, style=None): def my_represent_scalar(self, tag, value, style=None):
if style is None: if style is None:
if should_use_block(value): if should_use_block(value):
style='|' style = '|'
else: else:
style = self.default_style style = self.default_style
@ -242,6 +242,7 @@ def my_represent_scalar(self, tag, value, style=None):
self.represented_objects[self.alias_key] = node self.represented_objects[self.alias_key] = node
return node return node
def project_representer(dumper, data): def project_representer(dumper, data):
return dumper.represent_mapping('tag:yaml.org,2002:map', return dumper.represent_mapping('tag:yaml.org,2002:map',
data.items()) data.items())
@ -439,6 +440,7 @@ def expandYamlForTemplateJob(self, project, template, jobs_glob=None):
self.jobs.append(expanded) self.jobs.append(expanded)
JOBS_BY_ORIG_TEMPLATE[templated_job_name] = expanded JOBS_BY_ORIG_TEMPLATE[templated_job_name] = expanded
jenkins_jobs.parser.YamlParser.expandYamlForTemplateJob = \ jenkins_jobs.parser.YamlParser.expandYamlForTemplateJob = \
expandYamlForTemplateJob expandYamlForTemplateJob
@ -729,7 +731,7 @@ class Job:
ensure_task['name'] = 'Ensure artifacts directory exists' ensure_task['name'] = 'Ensure artifacts directory exists'
ensure_task['file'] = collections.OrderedDict() ensure_task['file'] = collections.OrderedDict()
ensure_task['file']['path'] = \ ensure_task['file']['path'] = \
"{{ zuul.executor.work_root }}/artifacts" "{{ zuul.executor.work_root }}/artifacts"
ensure_task['file']['state'] = 'directory' ensure_task['file']['state'] = 'directory'
ensure_task['delegate_to'] = 'localhost' ensure_task['delegate_to'] = 'localhost'
tasks.insert(0, ensure_task) tasks.insert(0, ensure_task)

View File

@ -66,7 +66,7 @@ class WebServer(zuul.cmd.ZuulDaemonApp):
try: try:
self.web = zuul.web.ZuulWeb(**params) self.web = zuul.web.ZuulWeb(**params)
except Exception as e: except Exception:
self.log.exception("Error creating ZuulWeb:") self.log.exception("Error creating ZuulWeb:")
sys.exit(1) sys.exit(1)

View File

@ -22,13 +22,12 @@ import os
import psutil import psutil
import pwd import pwd
import shlex import shlex
import subprocess
import sys import sys
import threading import threading
import re import re
import struct import struct
from typing import Dict, List # flake8: noqa from typing import Dict, List # noqa
from zuul.driver import (Driver, WrapperInterface) from zuul.driver import (Driver, WrapperInterface)
from zuul.execution_context import BaseExecutionContext from zuul.execution_context import BaseExecutionContext
@ -171,7 +170,7 @@ class BubblewrapDriver(Driver, WrapperInterface):
log = logging.getLogger("zuul.BubblewrapDriver") log = logging.getLogger("zuul.BubblewrapDriver")
name = 'bubblewrap' name = 'bubblewrap'
release_file_re = re.compile('^\W+-release$') release_file_re = re.compile(r'^\W+-release$')
def __init__(self): def __init__(self):
self.bwrap_command = self._bwrap_command() self.bwrap_command = self._bwrap_command()
@ -258,7 +257,7 @@ def main(args=None):
if cli_args.secret: if cli_args.secret:
for secret in cli_args.secret: for secret in cli_args.secret:
fn, content = secret.split('=', 1) fn, content = secret.split('=', 1)
secrets[fn]=content secrets[fn] = content
context = driver.getExecutionContext( context = driver.getExecutionContext(
cli_args.ro_paths, cli_args.rw_paths, cli_args.ro_paths, cli_args.rw_paths,

View File

@ -27,13 +27,13 @@ class GitSource(BaseSource):
hostname, config) hostname, config)
def getRefSha(self, project, ref): def getRefSha(self, project, ref):
raise NotImplemented() raise NotImplementedError()
def isMerged(self, change, head=None): def isMerged(self, change, head=None):
raise NotImplemented() raise NotImplementedError()
def canMerge(self, change, allow_needs): def canMerge(self, change, allow_needs):
raise NotImplemented() raise NotImplementedError()
def getChange(self, event, refresh=False): def getChange(self, event, refresh=False):
return self.connection.getChange(event, refresh) return self.connection.getChange(event, refresh)
@ -61,7 +61,7 @@ class GitSource(BaseSource):
return self.connection.getGitUrl(project) return self.connection.getGitUrl(project)
def getProjectOpenChanges(self, project): def getProjectOpenChanges(self, project):
raise NotImplemented() raise NotImplementedError()
def getRequireFilters(self, config): def getRequireFilters(self, config):
return [] return []
@ -70,4 +70,4 @@ class GitSource(BaseSource):
return [] return []
def getRefForChange(self, change): def getRefForChange(self, change):
raise NotImplemented() raise NotImplementedError()

View File

@ -992,7 +992,7 @@ class AnsibleJob(object):
def doMergeChanges(self, merger, items, repo_state): def doMergeChanges(self, merger, items, repo_state):
try: try:
ret = merger.mergeChanges(items, repo_state=repo_state) ret = merger.mergeChanges(items, repo_state=repo_state)
except ValueError as e: except ValueError:
# Return ABORTED so that we'll try again. At this point all of # Return ABORTED so that we'll try again. At this point all of
# the refs we're trying to merge should be valid refs. If we # the refs we're trying to merge should be valid refs. If we
# can't fetch them, it should resolve itself. # can't fetch them, it should resolve itself.

View File

@ -67,8 +67,8 @@ def timeout_handler(path):
class Repo(object): class Repo(object):
commit_re = re.compile('^commit ([0-9a-f]{40})$') commit_re = re.compile(r'^commit ([0-9a-f]{40})$')
diff_re = re.compile('^@@ -\d+,\d \+(\d+),\d @@$') diff_re = re.compile(r'^@@ -\d+,\d \+(\d+),\d @@$')
def __init__(self, remote, local, email, username, speed_limit, speed_time, def __init__(self, remote, local, email, username, speed_limit, speed_time,
sshkey=None, cache_path=None, logger=None, git_timeout=300, sshkey=None, cache_path=None, logger=None, git_timeout=300,
@ -158,7 +158,7 @@ class Repo(object):
mygit.clone(git.cmd.Git.polish_url(url), self.local_path, mygit.clone(git.cmd.Git.polish_url(url), self.local_path,
kill_after_timeout=self.git_timeout) kill_after_timeout=self.git_timeout)
break break
except Exception as e: except Exception:
if attempt < self.retry_attempts: if attempt < self.retry_attempts:
time.sleep(self.retry_interval) time.sleep(self.retry_interval)
self.log.warning("Retry %s: Clone %s" % ( self.log.warning("Retry %s: Clone %s" % (