Bump hacking

hacking 3.0.x is too old. Also remove the note about pip's behavior,
which was already fixed in recent versions.

Change-Id: I65d350943649c3346ed5741631c01724ddd256ef
parent b68c105696
commit 44cd95684b
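
The hunks below repeat a few mechanical cleanups that go along with the newer hacking release: a space after yield, assertIn() instead of assertTrue(x in y), and log arguments handed to the logging call instead of being rendered inline with .format(). A minimal, hypothetical sketch of the latter two patterns follows; the logger and test case are stand-ins, not Mistral code, and plain stdlib logging is shown (which interpolates %-style placeholders, while the diff keeps its {} placeholders):

    # Hypothetical sketch, not part of this commit: the logger and test case
    # below stand in for the oslo.log logger and API test classes in the diff.
    import logging
    import unittest

    LOG = logging.getLogger(__name__)


    def report_heartbeat_failure(exec_id):
        # Old style: render the message eagerly with .format(), e.g.
        #     LOG.debug("Heartbeat update failed. {}".format(exec_id))
        # New style: pass the argument to the logging call; stdlib logging
        # interpolates it lazily with a %-style placeholder.
        LOG.debug("Heartbeat update failed. %s", exec_id)


    class AssertionExample(unittest.TestCase):
        def test_key_present(self):
            body = {'project_id': '123'}
            # Old style: assertTrue('project_id' in body) only reports
            # "False is not true" on failure.
            # New style: assertIn names the key and the container.
            self.assertIn('project_id', body)
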
@@ -254,8 +254,8 @@ class DefaultEngine(base.Engine):
             db_api.update_action_execution_heartbeat(exec_id)
         except exceptions.DBEntityNotFoundError:
             LOG.debug(
-                "Action execution heartbeat update failed. {}"
-                .format(exec_id),
+                "Action execution heartbeat update failed. {}",
+                exec_id,
                 exc_info=True
             )
             # Ignore this error and continue with the
@@ -68,24 +68,24 @@ def check_oslo_namespace_imports(logical_line):
         msg = ("O323: '%s' must be used instead of '%s'.") % (
             logical_line.replace('oslo.', 'oslo_'),
             logical_line)
-        yield(0, msg)
+        yield (0, msg)
     elif re.match(oslo_namespace_imports_from_root, logical_line):
         msg = ("O323: '%s' must be used instead of '%s'.") % (
             logical_line.replace('from oslo import ', 'import oslo_'),
             logical_line)
-        yield(0, msg)
+        yield (0, msg)
     elif re.match(oslo_namespace_imports_dot, logical_line):
         msg = ("O323: '%s' must be used instead of '%s'.") % (
             logical_line.replace('import', 'from').replace('.', ' import '),
             logical_line)
-        yield(0, msg)
+        yield (0, msg)
 
 
 @core.flake8ext
 def check_python3_xrange(logical_line):
     if re.search(r"\bxrange\s*\(", logical_line):
-        yield(0, "M327: Do not use xrange(). 'xrange()' is not compatible "
-              "with Python 3. Use range() or range() instead.")
+        yield (0, "M327: Do not use xrange(). 'xrange()' is not compatible "
+               "with Python 3. Use range() or range() instead.")
 
 
 @core.flake8ext
@@ -93,7 +93,7 @@ def check_python3_no_iteritems(logical_line):
     msg = ("M328: Use six.iteritems() instead of dict.iteritems().")
 
     if re.search(r".*\.iteritems\(\)", logical_line):
-        yield(0, msg)
+        yield (0, msg)
 
 
 @core.flake8ext
@@ -101,7 +101,7 @@ def check_python3_no_iterkeys(logical_line):
     msg = ("M329: Use six.iterkeys() instead of dict.iterkeys().")
 
     if re.search(r".*\.iterkeys\(\)", logical_line):
-        yield(0, msg)
+        yield (0, msg)
 
 
 @core.flake8ext
@@ -109,7 +109,7 @@ def check_python3_no_itervalues(logical_line):
     msg = ("M330: Use six.itervalues() instead of dict.itervalues().")
 
     if re.search(r".*\.itervalues\(\)", logical_line):
-        yield(0, msg)
+        yield (0, msg)
 
 
 class BaseASTChecker(ast.NodeVisitor):
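
The check functions above follow flake8's logical-line plugin contract: each is called once per logical line and yields (offset, message) tuples for every violation it finds, which is why the only change needed here is the space after yield. A small, hypothetical check in the same shape (the X999 code and the function name are made up for illustration):

    # Hypothetical logical-line check in the same shape as the ones above;
    # the X999 code and the function name are illustrative only.
    import re


    def check_no_print_calls(logical_line):
        """Yield (offset, message) for every print() call on the logical line."""
        match = re.search(r"\bprint\s*\(", logical_line)
        if match:
            yield (match.start(),
                   "X999: do not use print(); use a logger instead.")


    # In Mistral these checks are registered with the @core.flake8ext
    # decorator shown above; calling one directly demonstrates the contract.
    for offset, message in check_no_print_calls("    print('hello')"):
        assert offset == 4 and message.startswith("X999")
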
@@ -72,7 +72,7 @@ class KombuRPCListener(ConsumerMixin):
         :param message: the plain amqp kombu.message with additional
                         information
         """
-        LOG.debug("Got response: {0}".format(response))
+        LOG.debug("Got response: {}", response)
 
         try:
             message.ack()
@@ -97,7 +97,7 @@ class KombuRPCListener(ConsumerMixin):
         else:
             LOG.debug(
                 "Got a response, but seems like no process is waiting for "
-                "it [correlation_id={0}]".format(correlation_id)
+                "it [correlation_id={}]", correlation_id
             )
 
     def get_result(self, correlation_id, timeout):
@@ -147,8 +147,8 @@ class KombuRPCServer(rpc_base.RPCServer, kombu_base.Base):
             self.stop()
 
             LOG.info(
-                "Server with id='{}' stopped."
-                .format(self.server_id)
+                "Server with id='{}' stopped.",
+                self.server_id
             )
 
             return
@@ -51,14 +51,12 @@ def handle_expired_actions():
         CONF.action_heartbeat.batch_size
     )
 
-    LOG.debug("Found {} running and expired actions.".format(
-        len(action_exs))
-    )
+    LOG.debug("Found {} running and expired actions.", len(action_exs))
 
     if action_exs:
         LOG.info(
             "Actions executions to transit to error, because "
-            "heartbeat wasn't received: {}".format(action_exs)
+            "heartbeat wasn't received: {}", action_exs
         )
 
         for action_ex in action_exs:
@@ -131,7 +129,7 @@ def start():
         LOG.debug(
             "First run of action heartbeat checker, wait before "
             "checking to make sure executors have time to send "
-            "heartbeats. ({} seconds)".format(wait_time)
+            "heartbeats. ({} seconds)", wait_time
         )
 
     global _stopped
@@ -56,8 +56,8 @@ def pause_running_executions(skip_tx=False):
         db_api.get_workflow_executions(state=states.RUNNING,
                                        insecure=True)]
 
-    LOG.info("Number of find workflow executions is {}"
-             .format(len(execution_ids)))
+    LOG.info("Number of find workflow executions is {}",
+             len(execution_ids))
 
     if skip_tx:
         sched = sched_base.get_system_scheduler()
@@ -112,7 +112,7 @@ def _pause_execution(wf_ex_id, project_id, skip_tx=False):
 
         if states.is_running(wf_ex.state):
             workflow_handler.pause_workflow(wf_ex)
-            LOG.info('Execution {} was paused'.format(wf_ex_id))
+            LOG.info('Execution {} was paused', wf_ex_id)
 
 
 def await_pause_executions(skip_tx=False):
@@ -139,9 +139,9 @@ def await_pause_executions(skip_tx=False):
             db_api.update_maintenance_status(PAUSED)
             return
 
-        LOG.info('The following tasks have RUNNING state: {}'.format([
+        LOG.info('The following tasks have RUNNING state: {}', [
             task.id for task in tasks
-        ]))
+        ])
 
         sched = sched_base.get_system_scheduler()
         job = sched_base.SchedulerJob(
@@ -166,9 +166,9 @@ def await_pause_executions(skip_tx=False):
         if not tasks:
             return True
 
-        LOG.info('The following tasks have RUNNING state: {}'.format([
+        LOG.info('The following tasks have RUNNING state: {}', [
             task.id for task in tasks
-        ]))
+        ])
 
         eventlet.sleep(1)
 
@@ -284,6 +284,4 @@ def _resume_execution(wf_ex_id, skip_tx=False):
 
     workflow_handler.resume_workflow(wf_ex)
 
-    LOG.info('The following execution was resumed: {}'.format([
-        wf_ex.id
-    ]))
+    LOG.info('The following execution was resumed: {}', [wf_ex.id])
@@ -110,7 +110,7 @@ class TestCronTriggerController(base.APITest):
         resp = self.app.get('/v2/cron_triggers/my_cron_trigger')
 
         self.assertEqual(200, resp.status_int)
-        self.assertTrue('project_id' in resp.json)
+        self.assertIn('project_id', resp.json)
 
     @mock.patch.object(db_api, "get_cron_trigger", MOCK_NOT_FOUND)
     def test_get_not_found(self):
@@ -224,7 +224,7 @@ class TestExecutionsController(base.APITest):
         resp = self.app.get('/v2/executions/123', expect_errors=True)
 
         self.assertEqual(200, resp.status_int)
-        self.assertTrue('project_id' in resp.json)
+        self.assertIn('project_id', resp.json)
 
     @mock.patch.object(
         db_api,
@@ -242,7 +242,7 @@ class TestTasksController(base.APITest):
         resp = self.app.get('/v2/tasks/123')
 
         self.assertEqual(200, resp.status_int)
-        self.assertTrue('project_id' in resp.json)
+        self.assertIn('project_id', resp.json)
 
     @mock.patch.object(db_api, 'get_task_executions', MOCK_EMPTY)
     def test_get_all_empty(self):
@@ -213,7 +213,7 @@ class TestWorkbooksController(base.APITest):
         resp = self.app.get('/v2/workbooks/123')
 
         self.assertEqual(200, resp.status_int)
-        self.assertTrue('project_id' in resp.json)
+        self.assertIn('project_id', resp.json)
 
     @mock.patch.object(workbooks, "update_workbook_v2", MOCK_UPDATED_WORKBOOK)
     def test_put(self):
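
For context on the test changes above: assertIn reports both the missing key and the container in its failure message, whereas assertTrue(x in y) only reports that False is not true. A tiny, hypothetical illustration (not part of the Mistral test suite):

    import unittest


    class AssertionStyleExample(unittest.TestCase):
        # Hypothetical test; the dict stands in for a JSON response body.
        def test_membership_with_assertin(self):
            body = {'id': '123'}
            # self.assertTrue('project_id' in body) would fail with
            # "False is not true"; self.assertIn('project_id', body) would
            # fail with "'project_id' not found in {'id': '123'}".
            self.assertIn('id', body)
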
@@ -232,7 +232,7 @@ class WorkflowController(object):
 
     @abc.abstractmethod
     def evaluate_workflow_final_context(self):
-        """Evaluates final workflow context assuming that workflow has finished.
+        """Evaluates final workflow context after workflow has finished.
 
         :return: Final workflow context.
         """
@@ -1,7 +1,3 @@
-# The order of packages is significant, because pip processes them in the order
-# of appearance. Changing the order has an impact on the overall integration
-# process, which may cause wedges in the gate later.
-
 alembic>=0.9.6 # MIT
 croniter>=0.3.4 # MIT License
 cachetools>=2.0.0 # MIT License
@@ -1,7 +1,4 @@
-# The order of packages is significant, because pip processes them in the order
-# of appearance. Changing the order has an impact on the overall integration
-# process, which may cause wedges in the gate later.
-hacking>=3.0.1,<3.1.0 # Apache-2.0
+hacking>=6.1.0,<6.2.0 # Apache-2.0
 
 coverage!=4.4,>=4.0 # Apache-2.0
 doc8>=0.8.1 # Apache-2.0